{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"PicoAIProxy","owner":"PicoMLX","isFork":false,"description":"Reverse proxy for OpenAI and Anthropic written in server-side Swift","allTopics":["macos","swift","ios","openai","server-side-swift","openai-api","anthropic","anthropic-claude","anthropic-ai"],"primaryLanguage":{"name":"Swift","color":"#F05138"},"pullRequestCount":0,"issueCount":1,"starsCount":59,"forksCount":4,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-04T04:39:34.650Z"}},{"type":"Public","name":"PicoVector","owner":"PicoMLX","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Swift","color":"#F05138"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-30T06:02:56.950Z"}},{"type":"Public","name":"PicoMLXServer","owner":"PicoMLX","isFork":false,"description":"The easiest way to run the fastest MLX-based LLMs locally","allTopics":["ai","openai","mlx","openai-api","ollama","llama3"],"primaryLanguage":{"name":"Swift","color":"#F05138"},"pullRequestCount":0,"issueCount":2,"starsCount":183,"forksCount":10,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-27T19:12:51.651Z"}},{"type":"Public","name":"MLXKit","owner":"PicoMLX","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Swift","color":"#F05138"},"pullRequestCount":0,"issueCount":0,"starsCount":2,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-27T18:07:23.349Z"}}],"repositoryCount":4,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}