{"payload":{"header_redesign_enabled":false,"results":[{"id":"502482803","archived":false,"color":"#3572A5","followers":8771,"has_funding_file":false,"hl_name":"bigscience-workshop/petals","hl_trunc_description":"🌸 Run LLMs at home, BitTorrent-style. Fine-tuning and inference up to 10x faster than offloading","language":"Python","mirror":false,"owned_by_organization":true,"public":true,"repo":{"repository":{"id":502482803,"name":"petals","owner_id":82455566,"owner_login":"bigscience-workshop","updated_at":"2024-04-29T20:13:42.990Z","has_issues":true}},"sponsorable":false,"topics":["nlp","bloom","distributed-systems","machine-learning","deep-learning","chatbot","pytorch","falcon","transformer","neural-networks","llama","gpt","pretrained-models","language-models","volunteer-computing","pipeline-parallelism","guanaco","tensor-parallelism","large-language-models","llama2"],"type":"Public","help_wanted_issues_count":9,"good_first_issue_issues_count":5,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":71,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Abigscience-workshop%252Fpetals%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/bigscience-workshop/petals/star":{"post":"YLUjAs-cRUv5hIQG1rlZ5QH8OlBFj9LZbxpW9wTwoGVrzfBl3w92tt7sKBBP5Vcjgq6cS1jhzEhgr6CKn9V2uQ"},"/bigscience-workshop/petals/unstar":{"post":"zUGeQYh9gY0B9336E0QhuGhkHRqK-ytMfm5Ap6M1gRSGgRggWEcRmffDudCzplRa9ZDBhIB6Lo8qQQPd8g6WFQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"rSAgW67LX2-rVfFu7DkT0EVIFFLS76m_ftBFZQOd9vumppRakwIP_5Dn7RHRMvnYBITwlHnq393r-7XXMd0x0Q"}}},"title":"Repository search results"}