{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"llama3","owner":"meta-llama","isFork":false,"description":"The official Meta Llama 3 GitHub site","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":20,"issueCount":97,"starsCount":21423,"forksCount":2143,"license":"Other","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,23,3,75,4,1,1,5,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-02T02:10:37.552Z"}},{"type":"Public","name":"llama-recipes","owner":"meta-llama","isFork":false,"description":"Scripts for fine-tuning Meta Llama3 with composable FSDP & PEFT methods to cover single/multi-node GPUs. Supports default & custom datasets for applications such as summarization and Q&A. Supporting a number of candid inference solutions such as HF TGI, VLLM for local or cloud deployment. Demo apps to showcase Meta Llama3 for WhatsApp & Messenger.","allTopics":["python","machine-learning","ai","llama","finetuning","llm","langchain","vllm","llama2","pytorch"],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":44,"issueCount":100,"starsCount":9952,"forksCount":1400,"license":null,"participation":[0,0,0,0,0,0,41,40,51,25,7,6,67,49,26,6,16,30,21,30,6,35,12,14,7,5,34,22,30,20,4,7,9,14,4,43,21,3,21,6,13,12,5,2,15,59,59,40,34,63,8,6],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T18:06:24.861Z"}},{"type":"Public","name":"PurpleLlama","owner":"meta-llama","isFork":false,"description":"Set of tools to assess and improve LLM security.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":2062,"forksCount":326,"license":"Other","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,17,5,0,0,0,4,1,1,1,4,0,5,3,8,7,1,16,19,20,40,7,6,3,1,2,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T14:59:48.512Z"}},{"type":"Public","name":"codellama","owner":"meta-llama","isFork":false,"description":"Inference code for CodeLlama models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":7,"issueCount":88,"starsCount":15289,"forksCount":1757,"license":"Other","participation":[0,0,0,0,0,0,0,0,0,0,0,4,2,5,5,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-21T21:14:23.157Z"}},{"type":"Public","name":"llama","owner":"meta-llama","isFork":false,"description":"Inference code for Llama models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":49,"issueCount":316,"starsCount":53721,"forksCount":9258,"license":"Other","participation":[0,0,0,0,0,0,20,0,3,6,0,4,18,21,7,6,3,0,4,5,0,7,4,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,2,2,5,0,0,3,0,0,0,0,2,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-15T03:49:42.993Z"}}],"repositoryCount":5,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}