{"payload":{"pageCount":4,"repositories":[{"type":"Public","name":"datasets","owner":"huggingface","isFork":false,"description":"🤗 The largest hub of ready-to-use datasets for ML models with fast, easy-to-use and efficient data manipulation tools","allTopics":["machine-learning","natural-language-processing","computer-vision","deep-learning","tensorflow","numpy","speech","pandas","datasets","hacktoberfest","nlp","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":80,"issueCount":631,"starsCount":18606,"forksCount":2547,"license":"Apache License 2.0","participation":[6,11,6,6,5,12,7,19,6,3,7,7,6,12,5,3,6,1,6,9,10,7,2,14,7,11,6,10,16,4,1,6,1,8,10,13,1,7,17,8,5,6,2,3,9,11,2,4,5,9,3,9],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-03T05:45:50.909Z"}},{"type":"Public","name":"diffusers","owner":"huggingface","isFork":false,"description":"🤗 Diffusers: State-of-the-art diffusion models for image and audio generation in PyTorch and FLAX.","allTopics":["deep-learning","pytorch","image-generation","flax","hacktoberfest","diffusion","text2image","image2image","jax","score-based-generative-modeling","stable-diffusion","stable-diffusion-diffusers","latent-diffusion-models"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":117,"issueCount":340,"starsCount":23181,"forksCount":4778,"license":"Apache License 2.0","participation":[37,25,16,11,34,38,31,54,41,32,26,50,42,24,49,36,45,32,26,26,38,39,38,45,25,54,37,16,51,42,39,30,39,30,30,41,37,28,44,48,29,33,37,20,25,18,24,32,26,22,20,32],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-03T05:06:42.243Z"}},{"type":"Public","name":"transformers","owner":"huggingface","isFork":false,"description":"🤗 Transformers: State-of-the-art Machine Learning for Pytorch, TensorFlow, and JAX.","allTopics":["python","seq2seq","flax","language-models","nlp-library","hacktoberfest","jax","pytorch-transformers","model-hub","nlp","machine-learning","natural-language-processing","deep-learning","tensorflow","pytorch","transformer","speech-recognition","pretrained-models","language-model","bert"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":240,"issueCount":854,"starsCount":127027,"forksCount":25158,"license":"Apache License 2.0","participation":[73,72,67,68,29,48,71,79,49,69,63,71,62,62,50,48,41,68,57,52,65,81,61,70,45,37,74,77,48,3,21,53,54,49,54,41,52,48,55,65,67,57,50,43,61,73,79,63,47,61,73,55],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-03T02:55:05.753Z"}},{"type":"Public","name":"text-generation-inference","owner":"huggingface","isFork":false,"description":"Large Language Model Text Generation Inference","allTopics":["nlp","bloom","deep-learning","inference","pytorch","falcon","transformer","gpt","starcoder"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":11,"issueCount":134,"starsCount":8144,"forksCount":899,"license":"Apache License 2.0","participation":[4,4,6,8,10,16,18,13,13,13,15,3,8,13,3,2,20,14,3,5,4,0,4,2,8,5,2,18,7,0,0,6,2,20,13,7,12,8,13,0,4,12,4,5,18,7,19,19,2,22,13,14],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-03T01:13:56.340Z"}},{"type":"Public","name":"trl","owner":"huggingface","isFork":false,"description":"Train transformer language models with reinforcement learning.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":17,"issueCount":57,"starsCount":8418,"forksCount":1014,"license":"Apache License 
2.0","participation":[19,6,14,2,11,7,9,13,6,6,7,10,14,8,21,10,5,7,10,8,7,15,12,5,6,8,9,9,17,8,4,23,12,9,15,1,9,4,11,4,7,22,5,7,16,12,9,11,3,3,11,3],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-02T23:39:47.221Z"}},{"type":"Public","name":"nanotron","owner":"huggingface","isFork":false,"description":"Minimalistic large language model 3D-parallelism training","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":24,"issueCount":30,"starsCount":858,"forksCount":74,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,1,0,0,0,0,0,0,0,0,0,3,28,8,4,35,102,67,77,65,97,37,26,59,34,18,15,36,49,48,45,4,16,13,3,5],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-02T18:34:38.441Z"}},{"type":"Public","name":"evaluate","owner":"huggingface","isFork":false,"description":"🤗 Evaluate: A library for easily evaluating machine learning models and datasets.","allTopics":["evaluation","machine-learning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":49,"issueCount":139,"starsCount":1851,"forksCount":229,"license":"Apache License 2.0","participation":[0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,2,1,0,1,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,1,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-02T15:25:51.453Z"}},{"type":"Public","name":"lerobot","owner":"huggingface","isFork":false,"description":"🤗 LeRobot: State-of-the-art Machine Learning for Real-World Robotics in Pytorch","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":20,"issueCount":12,"starsCount":3307,"forksCount":240,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,5,1,5,21,33,55,52,63,59,42,71,56,13,23,19,7,10,15],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-03T05:39:01.997Z"}},{"type":"Public","name":"peft","owner":"huggingface","isFork":false,"description":"🤗 PEFT: State-of-the-art Parameter-Efficient Fine-Tuning.","allTopics":["python","adapter","transformers","pytorch","lora","diffusion","parameter-efficient-learning","llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":9,"issueCount":37,"starsCount":14433,"forksCount":1349,"license":"Apache License 2.0","participation":[5,14,8,16,5,16,12,8,4,7,8,8,9,7,7,7,10,9,9,3,6,9,17,17,10,11,24,17,9,3,4,20,3,5,16,16,14,17,6,9,11,8,5,5,11,5,8,11,5,12,7,7],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-01T20:45:58.359Z"}},{"type":"Public","name":"optimum-tpu","owner":"huggingface","isFork":false,"description":"Google TPU optimizations for transformers models","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":2,"issueCount":1,"starsCount":34,"forksCount":6,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,2,1,2,3,10,2,3,8,0,3,1,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-01T19:54:07.559Z"}},{"type":"Public","name":"datatrove","owner":"huggingface","isFork":false,"description":"Freeing data processing from scripting madness by providing a set of platform-agnostic customizable pipeline processing blocks.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":5,"issueCount":20,"starsCount":1497,"forksCount":92,"license":"Apache License 
2.0","participation":[0,13,19,13,41,11,5,11,8,9,0,3,4,4,2,0,0,3,1,19,0,11,3,15,30,7,10,3,11,1,0,1,8,6,11,9,2,11,6,8,1,9,0,3,3,10,6,4,8,4,10,6],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-01T16:10:54.531Z"}},{"type":"Public","name":"huggingface_hub","owner":"huggingface","isFork":false,"description":"The official Python client for the Huggingface Hub.","allTopics":["machine-learning","natural-language-processing","deep-learning","models","pytorch","pretrained-models","hacktoberfest","model-hub"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":14,"issueCount":129,"starsCount":1764,"forksCount":454,"license":"Apache License 2.0","participation":[4,8,3,8,8,2,2,4,1,4,9,1,12,31,7,13,7,10,13,8,7,13,11,10,15,17,4,9,15,0,7,9,12,4,0,7,4,7,16,7,5,12,11,5,15,18,15,13,0,1,7,13],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-01T12:59:34.531Z"}},{"type":"Public","name":"accelerate","owner":"huggingface","isFork":false,"description":"🚀 A simple way to launch, train, and use PyTorch models on almost any device and distributed configuration, automatic mixed precision (including fp8), and easy-to-configure FSDP and DeepSpeed support","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":18,"issueCount":105,"starsCount":7164,"forksCount":838,"license":"Apache License 2.0","participation":[32,17,9,25,4,29,14,10,10,11,9,13,15,7,17,3,1,8,10,4,11,11,10,14,13,8,12,7,6,5,5,17,8,8,8,13,15,1,25,13,8,10,9,13,11,11,11,13,11,9,7,4],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-01T04:38:20.330Z"}},{"type":"Public","name":"pytorch-image-models","owner":"huggingface","isFork":false,"description":"The largest collection of PyTorch image encoders / backbones. Including train, eval, inference, export scripts, and pretrained weights -- ResNet, ResNeXT, EfficientNet, NFNet, Vision Transformer (ViT), MobileNet-V3/V2, RegNet, DPN, CSPNet, Swin Transformer, MaxViT, CoAtNet, ConvNeXt, and more","allTopics":["pytorch","imagenet","image-classification","resnet","pretrained-models","mixnet","pretrained-weights","distributed-training","dual-path-networks","mobilenet-v2","mobile-deep-learning","mobilenetv3","efficientnet","augmix","randaugment","nfnets","normalization-free-training","vision-transformer-models","convnext","maxvit"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":30,"issueCount":56,"starsCount":30250,"forksCount":4605,"license":"Apache License 2.0","participation":[2,7,1,0,2,0,1,10,17,13,18,22,18,0,2,4,7,4,4,20,4,13,2,6,22,1,5,6,1,5,2,5,3,9,5,13,7,4,0,2,4,11,0,3,17,3,1,18,11,19,6,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T22:09:58.223Z"}},{"type":"Public","name":"optimum-habana","owner":"huggingface","isFork":false,"description":"Easy and lightning fast training of 🤗 Transformers on Habana Gaudi processor (HPU)","allTopics":["transformers","bert","fine-tuning","hpu","habana"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":60,"issueCount":16,"starsCount":114,"forksCount":130,"license":"Apache License 2.0","participation":[11,8,6,5,6,4,8,13,2,8,16,12,15,5,16,10,21,9,11,6,13,16,11,12,19,15,14,10,9,5,5,7,10,16,11,5,11,13,11,10,20,12,15,7,11,10,21,20,14,5,12,9],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-03T03:53:45.660Z"}},{"type":"Public","name":"lm-evaluation-harness","owner":"huggingface","isFork":true,"description":"A framework for few-shot evaluation of language 
models.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":6,"issueCount":0,"starsCount":2,"forksCount":1413,"license":"MIT License","participation":[64,97,105,61,95,99,68,31,73,108,135,63,61,53,65,67,13,39,19,46,7,48,23,33,43,102,54,44,31,6,7,11,17,16,13,12,6,14,18,16,11,10,6,6,1,4,5,12,10,4,12,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T17:58:53.376Z"}},{"type":"Public","name":"lighteval","owner":"huggingface","isFork":false,"description":"LightEval is a lightweight LLM evaluation suite that Hugging Face has been using internally with the recently released LLM data processing library datatrove and LLM training library nanotron.","allTopics":["evaluation","evaluation-metrics","evaluation-framework","huggingface"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":11,"issueCount":42,"starsCount":409,"forksCount":48,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,7,18,4,7,12,15,0,12,7,3,4,4,1,8,2,1,1,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T16:07:18.552Z"}},{"type":"Public","name":"optimum-quanto","owner":"huggingface","isFork":false,"description":"A pytorch quantization backend for optimum","allTopics":["pytorch","quantization","optimum"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":11,"starsCount":614,"forksCount":32,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,26,13,16,37,0,0,4,7,19,56,39,25,0,10,2,11,5,13,6,10,36,12,12,25,6,9,8,10,13,1,4,3,9,7,13],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T15:24:50.198Z"}},{"type":"Public","name":"cosmopedia","owner":"huggingface","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":2,"issueCount":4,"starsCount":266,"forksCount":23,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,41,0,1,0,0,2,0,0,1,2,0,0,0,0,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T10:36:24.673Z"}},{"type":"Public","name":"autotrain-advanced","owner":"huggingface","isFork":false,"description":"🤗 AutoTrain Advanced","allTopics":["natural-language-processing","natural-language-understanding","huggingface","autotrain","python","machine-learning","deep-learning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":15,"starsCount":3501,"forksCount":418,"license":"Apache License 2.0","participation":[5,2,7,0,2,7,8,26,5,12,19,15,18,2,5,2,9,4,3,10,8,5,3,1,13,5,30,31,40,3,0,0,0,5,4,6,14,11,11,13,15,23,9,6,7,7,37,6,24,19,20,12],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T09:53:11.916Z"}},{"type":"Public","name":"diarizers","owner":"huggingface","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":3,"starsCount":206,"forksCount":13,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13,14,20,43,11,10,9,5,0,27],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T07:49:36.976Z"}},{"type":"Public","name":"parler-tts","owner":"huggingface","isFork":false,"description":"Inference and training library for high-quality TTS 
models.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":3,"issueCount":31,"starsCount":2703,"forksCount":276,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,29,17,11,10,4,2,1,5,59,0,8,4,4,8,11,2],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T05:58:34.870Z"}},{"type":"Public","name":"optimum-benchmark","owner":"huggingface","isFork":false,"description":"A unified multi-backend utility for benchmarking Transformers, Timm, PEFT, Diffusers and Sentence-Transformers with full support of Optimum's hardware optimizations & quantization schemes.","allTopics":["benchmark","pytorch","openvino","onnxruntime","text-generation-inference","neural-compressor","tensorrt-llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":2,"issueCount":6,"starsCount":205,"forksCount":33,"license":"Apache License 2.0","participation":[26,9,15,10,9,14,7,9,16,49,50,7,86,26,18,10,0,2,0,26,3,13,4,7,4,7,8,0,0,0,3,4,3,0,0,1,1,13,2,4,6,8,4,3,7,2,0,4,9,17,2,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-30T21:35:19.621Z"}},{"type":"Public","name":"optimum","owner":"huggingface","isFork":false,"description":"🚀 Accelerate training and inference of 🤗 Transformers and 🤗 Diffusers with easy to use hardware optimization tools","allTopics":["training","optimization","intel","transformers","tflite","onnxruntime","graphcore","habana","inference","pytorch","quantization","onnx"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":85,"issueCount":274,"starsCount":2226,"forksCount":386,"license":"Apache License 2.0","participation":[5,11,4,12,10,6,11,16,12,10,3,12,10,19,4,6,2,5,5,13,6,13,7,6,1,0,13,14,1,1,0,13,5,10,1,8,10,8,10,0,1,9,5,3,10,2,6,2,2,1,3,10],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-30T16:42:35.753Z"}},{"type":"Public","name":"dataset-viewer","owner":"huggingface","isFork":false,"description":"Lightweight web API for visualizing and exploring any dataset - computer vision, speech, text, and tabular - stored on the Hugging Face Hub","allTopics":["nlp","data","machine-learning","api-rest","datasets","huggingface"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":3,"issueCount":146,"starsCount":628,"forksCount":62,"license":"Apache License 2.0","participation":[27,23,25,29,32,12,13,32,41,26,25,11,11,32,13,17,38,33,27,20,13,20,24,15,14,7,15,15,7,0,7,22,15,9,16,20,17,28,17,17,24,11,13,9,26,18,9,8,10,33,13,15],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-30T16:15:46.742Z"}},{"type":"Public","name":"distil-whisper","owner":"huggingface","isFork":false,"description":"Distilled variant of Whisper for speech recognition. 
6x faster, 50% smaller, within 1% word error rate.","allTopics":["audio","speech-recognition","whisper"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":4,"issueCount":51,"starsCount":3266,"forksCount":231,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,35,4,37,0,0,7,1,2,0,0,0,3,0,4,2,0,0,0,0,0,1,27,14,2,3,0,20,0,0,2,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-30T09:56:33.229Z"}},{"type":"Public","name":"doc-builder","owner":"huggingface","isFork":false,"description":"The package used to build the documentation of our Hugging Face repos","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":5,"issueCount":45,"starsCount":69,"forksCount":30,"license":"Apache License 2.0","participation":[5,0,0,0,2,0,0,0,0,2,0,2,1,0,2,7,18,1,1,3,4,5,0,6,6,4,1,0,0,0,0,0,1,2,11,0,0,0,0,1,1,0,1,3,2,0,1,2,0,0,0,1],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-30T08:37:29.720Z"}},{"type":"Public","name":"tgi-gaudi","owner":"huggingface","isFork":true,"description":"Large Language Model Text Generation Inference on Habana Gaudi","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":6,"issueCount":2,"starsCount":17,"forksCount":899,"license":"Apache License 2.0","participation":[4,4,6,8,10,16,18,13,13,13,15,3,8,13,3,2,20,14,3,5,4,0,4,2,8,5,5,20,9,0,1,9,10,21,13,7,12,21,28,2,7,16,9,9,23,0,4,1,6,3,2,3],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-31T13:44:19.319Z"}},{"type":"Public","name":"api-inference-community","owner":"huggingface","isFork":false,"description":"","allTopics":["hacktoberfest"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":40,"issueCount":32,"starsCount":144,"forksCount":57,"license":"Apache License 2.0","participation":[3,5,4,0,3,1,1,4,9,2,0,0,1,0,0,0,0,4,2,1,0,2,3,1,0,3,7,0,1,3,0,2,0,0,1,4,0,0,3,2,1,1,2,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-28T17:08:26.643Z"}},{"type":"Public","name":"dataspeech","owner":"huggingface","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":4,"issueCount":8,"starsCount":205,"forksCount":22,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-28T12:00:25.403Z"}}],"repositoryCount":111,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}
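The listing above is a static snapshot of page metadata. For an up-to-date view of the same fields, the organization's repositories can be fetched from GitHub's documented REST API (`GET /orgs/{org}/repos`). The sketch below is illustrative only, not the mechanism that produced this listing; it uses the REST schema's field names (`stargazers_count`, `forks_count`, `license`, `pushed_at`), which differ from the keys in the embedded page payload (`starsCount`, `forksCount`, and so on).

```python
# Illustrative sketch: list an organization's public repositories via
# GitHub's REST API (docs.github.com/en/rest/repos/repos).
# Unauthenticated requests are rate-limited; pass a token in an
# Authorization header for heavier use.
import requests


def list_org_repos(org: str = "huggingface", per_page: int = 100) -> list[dict]:
    """Return all public repositories of `org`, paging through the API."""
    repos, page = [], 1
    while True:
        resp = requests.get(
            f"https://api.github.com/orgs/{org}/repos",
            params={"type": "public", "per_page": per_page, "page": page},
            headers={"Accept": "application/vnd.github+json"},
            timeout=30,
        )
        resp.raise_for_status()
        batch = resp.json()
        if not batch:  # empty page -> no more results
            break
        repos.extend(batch)
        page += 1
    return repos


if __name__ == "__main__":
    # Print a compact listing, most-starred first.
    for repo in sorted(list_org_repos(), key=lambda r: r["stargazers_count"], reverse=True):
        license_name = (repo.get("license") or {}).get("name", "None")
        print(f'{repo["name"]:30} stars={repo["stargazers_count"]:<7} '
              f'forks={repo["forks_count"]:<6} license={license_name}')
```

Star, fork, and issue counts change continuously, so output from the sketch will not match the snapshot in the table exactly.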