diff --git "a/posts.csv" "b/posts.csv" new file mode 100644--- /dev/null +++ "b/posts.csv" @@ -0,0 +1,11365 @@ +avatarUrl,followerCount,fullname,name,slug,content,rawContent,attachments,mentions,reactions,publishedAt,updatedAt,commentators,url,totalUniqueImpressions,identifiedLanguage,numComments +https://cdn-avatars.huggingface.co/v1/production/uploads/5f57ea2d3f32f12a3c0692e6/b-9GG2p--smCameUPeCBN.jpeg,112.0,Alex,asigalov61,301808424415801,"[{'type': 'text', 'value': 'Check out new symbolic music AI front end and CLI training app', 'raw': 'Check out new symbolic music AI front end and CLI training app'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://webchatappai.github.io/midi-gen/', 'raw': 'https://webchatappai.github.io/midi-gen/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/WebChatAppAi/Orpheus-Midi-Model-Maker', 'raw': 'https://github.com/WebChatAppAi/Orpheus-Midi-Model-Maker'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'Timzoid', 'raw': '@Timzoid'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'Csplk', 'raw': '@Csplk'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'not-lain', 'raw': '@not-lain'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'victor', 'raw': '@victor'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'bartowski', 'raw': '@bartowski'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'John6666', 'raw': '@John6666'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Check out new symbolic music AI front end and CLI training app + +https://webchatappai.github.io/midi-gen/ + +https://github.com/WebChatAppAi/Orpheus-Midi-Model-Maker + +@Timzoid @Csplk @not-lain @victor @bartowski @John6666 ",[],"[{'_id': '6435718aaaef013d1aec3b8b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6435718aaaef013d1aec3b8b/XKf-8MA47tjVAM6SCX0MP.jpeg', 'fullname': 'Bartowski', 'name': 'bartowski', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7068}, {'_id': '62d93a2b28f9c86a40314043', 'avatarUrl': '/avatars/b2725bb163fa15d6c5856121780d52eb.svg', 'fullname': 'Ci Splunk', 'name': 'Csplk', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 68}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076}, {'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185}, {'_id': '66b68c64786cfb8437308815', 'avatarUrl': '/avatars/68c1101e0cc4c9651141ee9c6bfb6372.svg', 'fullname': 'Tim Clark', 'name': 'Timzoid', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}, {'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 
'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949}]","[{'reaction': '🔥', 'users': ['asigalov61'], 'count': 1}]",2025-06-30 21:32:41,2025-06-30 21:32:41.666,[],/posts/asigalov61/301808424415801,18,"{'language': 'en', 'probability': 0.5176713466644287}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/BywZYAPzsyBSCf8yJFiUf.jpeg,23.0,Salma Mayorquin,salma-remyx,520178128759841,"[{'type': 'text', 'value': ""I'm auto-generating Docker Images to smoke-test new research repos 🔥"", 'raw': ""I'm auto-generating Docker Images to smoke-test new research repos 🔥""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Shared to Docker Hub daily! 🐳', 'raw': 'Shared to Docker Hub daily! 🐳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Today's featured paper+Image:"", 'raw': ""Today's featured paper+Image:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLaVA-Scissor: Token Compression with Semantic Connected Components for Video LLMs ', 'raw': 'LLaVA-Scissor: Token Compression with Semantic Connected Components for Video LLMs '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://hub.docker.com/repository/docker/remyxai/2506.21862v1/general', 'raw': 'https://hub.docker.com/repository/docker/remyxai/2506.21862v1/general'}]","I'm auto-generating Docker Images to smoke-test new research repos 🔥 +Shared to Docker Hub daily! 🐳 + +Today's featured paper+Image: +LLaVA-Scissor: Token Compression with Semantic Connected Components for Video LLMs + +https://hub.docker.com/repository/docker/remyxai/2506.21862v1/general",[],[],"[{'reaction': '🤯', 'users': ['salma-remyx'], 'count': 1}, {'reaction': '🔥', 'users': ['salma-remyx'], 'count': 1}]",2025-06-30 19:28:11,2025-06-30 19:28:11.540,[],/posts/salma-remyx/520178128759841,40,"{'language': 'en', 'probability': 0.8008716702461243}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64371b564aacf7bf786fb530/0lZEdVu06bx11fy1uTjpt.jpeg,495.0,Nymbo,Nymbo,958985226384513,"[{'type': 'text', 'value': 'Anyone know how to reset Claude web\'s MCP config? I connected mine when the HF MCP first released with just the default example spaces added. I added lots of other MCP spaces but Claude.ai doesn\'t update the available tools... ""Disconnecting"" the HF integration does nothing, deleting it and adding it again does nothing.', 'raw': 'Anyone know how to reset Claude web\'s MCP config? I connected mine when the HF MCP first released with just the default example spaces added. I added lots of other MCP spaces but Claude.ai doesn\'t update the available tools... ""Disconnecting"" the HF integration does nothing, deleting it and adding it again does nothing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Refreshing tools works fine in VS Code because I can manually restart it in ', 'raw': 'Refreshing tools works fine in VS Code because I can manually restart it in '}, {'type': 'inline_code', 'code': 'mcp.json', 'raw': '`mcp.json`'}, {'type': 'text', 'value': ', but claude.ai has no such option. Anyone got any ideas?', 'raw': ', but claude.ai has no such option. Anyone got any ideas?'}]","Anyone know how to reset Claude web's MCP config? I connected mine when the HF MCP first released with just the default example spaces added. 
I added lots of other MCP spaces but Claude.ai doesn't update the available tools... ""Disconnecting"" the HF integration does nothing, deleting it and adding it again does nothing. + +Refreshing tools works fine in VS Code because I can manually restart it in `mcp.json`, but claude.ai has no such option. Anyone got any ideas?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64371b564aacf7bf786fb530/ZisKnXp0QrBVZlqwm2-sS.png'}]",[],[],2025-06-30 16:55:05,2025-06-30 16:56:34.888,[],/posts/Nymbo/958985226384513,83,"{'language': 'en', 'probability': 0.8871386051177979}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/w3Z6xyKVBA6np65Tb16dP.jpeg,68.0,Simon Pagezy,pagezyhf,189638803943526,"[{'type': 'text', 'value': 'In case you missed it, Hugging Face expanded its collaboration with Azure a few weeks ago with a curated catalog of 10,000 models, accessible from Azure AI Foundry and Azure ML!', 'raw': 'In case you missed it, Hugging Face expanded its collaboration with Azure a few weeks ago with a curated catalog of 10,000 models, accessible from Azure AI Foundry and Azure ML!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'alvarobartt', 'raw': '@alvarobartt'}, {'type': 'text', 'value': ' cooked during these last days to prepare the one and only documentation you need, if you wanted to deploy Hugging Face models on Azure. It comes with an FAQ, great guides and examples on how to deploy VLMs, LLMs, smolagents and more to come very soon.', 'raw': ' cooked during these last days to prepare the one and only documentation you need, if you wanted to deploy Hugging Face models on Azure. It comes with an FAQ, great guides and examples on how to deploy VLMs, LLMs, smolagents and more to come very soon.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We need your feedback: come help us and let us know what else you want to see, which model we should add to the collection, which model task we should prioritize adding, what else we should build a tutorial for. You’re just an issue away on our GitHub repo!', 'raw': 'We need your feedback: come help us and let us know what else you want to see, which model we should add to the collection, which model task we should prioritize adding, what else we should build a tutorial for. You’re just an issue away on our GitHub repo!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/docs/microsoft-azure/index', 'raw': 'https://huggingface.co/docs/microsoft-azure/index'}]","In case you missed it, Hugging Face expanded its collaboration with Azure a few weeks ago with a curated catalog of 10,000 models, accessible from Azure AI Foundry and Azure ML! + +@alvarobartt cooked during these last days to prepare the one and only documentation you need, if you wanted to deploy Hugging Face models on Azure. It comes with an FAQ, great guides and examples on how to deploy VLMs, LLMs, smolagents and more to come very soon. + +We need your feedback: come help us and let us know what else you want to see, which model we should add to the collection, which model task we should prioritize adding, what else we should build a tutorial for. You’re just an issue away on our GitHub repo! 
+ +https://huggingface.co/docs/microsoft-azure/index",[],"[{'_id': '60f0608166e5701b80ed3f02', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60f0608166e5701b80ed3f02/BHso-wSWpR9b8b8CKvodC.jpeg', 'fullname': 'Alvaro Bartolome', 'name': 'alvarobartt', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1920}]","[{'reaction': '🔥', 'users': ['jeffboudier'], 'count': 1}]",2025-06-30 15:40:57,2025-06-30 15:40:57.803,[],/posts/pagezyhf/189638803943526,86,"{'language': 'en', 'probability': 0.9303346276283264}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/626505d493e0b04d75710566/9rfJc9ORXU9J5a42Ev3v6.png,118.0,Stefano Fiorucci,anakin87,460502915743038,"[{'type': 'text', 'value': '🧰 Free up space on the Hub with ', 'raw': '🧰 Free up space on the Hub with '}, {'type': 'inline_code', 'code': 'super_squash_history', 'raw': '`super_squash_history`'}, {'type': 'text', 'value': ' 🧹', 'raw': ' 🧹'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As you may know, Hugging Face Hub has storage limits on private repos (100 GB for free users, 1 TB for PROs).', 'raw': 'As you may know, Hugging Face Hub has storage limits on private repos (100 GB for free users, 1 TB for PROs).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This weekend I did some cleanup on my private repos', 'raw': 'This weekend I did some cleanup on my private repos'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I went 1.58 TB down to 1 GB. 😅', 'raw': 'I went 1.58 TB down to 1 GB. 😅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Besides deleting old, unused models, the main tool I used was a lesser-known command:', 'raw': 'Besides deleting old, unused models, the main tool I used was a lesser-known command:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'inline_code', 'code': 'super_squash_history', 'raw': '`super_squash_history`'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'When you train a model, you often push multiple checkpoints to the Hub.', 'raw': 'When you train a model, you often push multiple checkpoints to the Hub.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Each checkpoint = a commit.', 'raw': 'Each checkpoint = a commit.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A 2.6B model in BF16 is ~5 GB.', 'raw': 'A 2.6B model in BF16 is ~5 GB.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So 10 checkpoints = 50 GB. That adds up fast.', 'raw': 'So 10 checkpoints = 50 GB. 
That adds up fast.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""While full commit history can be useful for rollbacks, it's often unnecessary for older experiments where only the final model matters."", 'raw': ""While full commit history can be useful for rollbacks, it's often unnecessary for older experiments where only the final model matters.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In these cases, you can use ', 'raw': 'In these cases, you can use '}, {'type': 'inline_code', 'code': 'super_squash_history', 'raw': '`super_squash_history`'}, {'type': 'text', 'value': ': it reduces your entire repo history to a single commit.', 'raw': ': it reduces your entire repo history to a single commit.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/docs/huggingface_hub/main/en/package_reference/hf_api#huggingface_hub.HfApi.super_squash_history', 'raw': 'https://huggingface.co/docs/huggingface_hub/main/en/package_reference/hf_api#huggingface_hub.HfApi.super_squash_history'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚠️ super_squash_history is a non-revertible operation. Once squashed, the commit history cannot be retrieved.', 'raw': '⚠️ super_squash_history is a non-revertible operation. Once squashed, the commit history cannot be retrieved.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hope this is useful to others.', 'raw': 'Hope this is useful to others.'}]","🧰 Free up space on the Hub with `super_squash_history` 🧹 + +As you may know, Hugging Face Hub has storage limits on private repos (100 GB for free users, 1 TB for PROs). + +This weekend I did some cleanup on my private repos +I went 1.58 TB down to 1 GB. 😅 + +Besides deleting old, unused models, the main tool I used was a lesser-known command: +`super_squash_history`. + +When you train a model, you often push multiple checkpoints to the Hub. +Each checkpoint = a commit. +A 2.6B model in BF16 is ~5 GB. +So 10 checkpoints = 50 GB. That adds up fast. + +While full commit history can be useful for rollbacks, it's often unnecessary for older experiments where only the final model matters. + +In these cases, you can use `super_squash_history`: it reduces your entire repo history to a single commit. + +https://huggingface.co/docs/huggingface_hub/main/en/package_reference/hf_api#huggingface_hub.HfApi.super_squash_history + +⚠️ super_squash_history is a non-revertible operation. Once squashed, the commit history cannot be retrieved. + +Hope this is useful to others.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626505d493e0b04d75710566/jsFV3t2IY2egmTs9Mrfe7.png'}]",[],[],2025-06-30 14:02:55,2025-06-30 14:20:37.106,[],/posts/anakin87/460502915743038,73,"{'language': 'en', 'probability': 0.8888084888458252}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/645b0c3ec35da9c7afd95421/vYBrCDagHsXAo6J2p-uG0.jpeg,23.0,Yuling,YerbaPage,465713138952268,"[{'type': 'text', 'value': 'How to defend benchmarks against knowledge leakage? 🛡️', 'raw': 'How to defend benchmarks against knowledge leakage? 
🛡️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LastingBench is a framework that mitigates memorization in QA benchmarks by identifying and rewriting leakage points, thereby improving the robustness and fairness of model evaluations. 🚀✨', 'raw': 'LastingBench is a framework that mitigates memorization in QA benchmarks by identifying and rewriting leakage points, thereby improving the robustness and fairness of model evaluations. 🚀✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2506.21614'}, 'url': 'https://huggingface.co/papers/2506.21614', 'raw': 'https://huggingface.co/papers/2506.21614', 'label': 'LastingBench: Defend Benchmarks Against Knowledge Leakage (2506.21614)'}, {'type': 'text', 'value': ' 📚', 'raw': ' 📚'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code and benchmark: ', 'raw': 'Code and benchmark: '}, {'type': 'link', 'href': 'https://github.com/Seriousss/LastingBench', 'raw': 'https://github.com/Seriousss/LastingBench'}, {'type': 'text', 'value': ' 🧑\u200d💻', 'raw': ' 🧑\u200d💻'}]","How to defend benchmarks against knowledge leakage? 🛡️ + +LastingBench is a framework that mitigates memorization in QA benchmarks by identifying and rewriting leakage points, thereby improving the robustness and fairness of model evaluations. 🚀✨ + +Paper: https://huggingface.co/papers/2506.21614 📚 +Code and benchmark: https://github.com/Seriousss/LastingBench 🧑‍💻",[],[],"[{'reaction': '😎', 'users': ['YerbaPage'], 'count': 1}, {'reaction': '🔥', 'users': ['YerbaPage'], 'count': 1}, {'reaction': '👀', 'users': ['YerbaPage'], 'count': 1}]",2025-06-30 13:25:04,2025-06-30 13:26:25.891,[],/posts/YerbaPage/465713138952268,72,"{'language': 'en', 'probability': 0.8276026248931885}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png,30.0,GHOSTAI,ghostai1,801640113264437,"[{'type': 'text', 'value': '# Edge AI technical challenges: A Deep Dive', 'raw': '# Edge AI technical challenges: A Deep Dive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As we continue to push the boundaries of artificial intelligence (AI), we find ourselves at the forefront of a technological revolution. One of the most exciting areas of AI development is Edge AI, which involves processing data at the edge of the network, rather than in the cloud. This approach offers numerous benefits, such as reduced latency, improved security, and decreased reliance on expensive cloud resources.', 'raw': 'As we continue to push the boundaries of artificial intelligence (AI), we find ourselves at the forefront of a technological revolution. One of the most exciting areas of AI development is Edge AI, which involves processing data at the edge of the network, rather than in the cloud. This approach offers numerous benefits, such as reduced latency, improved security, and decreased reliance on expensive cloud resources.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'However, the implementation of Edge AI is not without its technical challenges. For instance, one major hurdle is the need for powerful edge devices capable of handling the data processing requirements of Edge AI. 
These devices must be able to handle real-time data analysis and decision-making, which can be quite demanding.', 'raw': 'However, the implementation of Edge AI is not without its technical challenges. For instance, one major hurdle is the need for powerful edge devices capable of handling the data processing requirements of Edge AI. These devices must be able to handle real-time data analysis and decision-making, which can be quite demanding.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Another challenge lies in the complexity of deploying and managing multiple edge devices. In a traditional cloud-based model, this is relatively straightforward, but with Edge AI, it becomes a much more complicated task. Developers need to ensure that these devices are properly synchronized and that data is consistently distributed across them.', 'raw': 'Another challenge lies in the complexity of deploying and managing multiple edge devices. In a traditional cloud-based model, this is relatively straightforward, but with Edge AI, it becomes a much more complicated task. Developers need to ensure that these devices are properly synchronized and that data is consistently distributed across them.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Despite these challenges, Edge AI has the potential to revolutionize the way we interact with technology. It offers the opportunity to create more intelligent, autonomous systems that can operate in real-time, without the need for constant communication with a centralized server. With the right approach and technical solutions, we can overcome these challenges and unlock the true power of Edge AI.', 'raw': 'Despite these challenges, Edge AI has the potential to revolutionize the way we interact with technology. It offers the opportunity to create more intelligent, autonomous systems that can operate in real-time, without the need for constant communication with a centralized server. With the right approach and technical solutions, we can overcome these challenges and unlock the true power of Edge AI.'}]","# Edge AI technical challenges: A Deep Dive + +As we continue to push the boundaries of artificial intelligence (AI), we find ourselves at the forefront of a technological revolution. One of the most exciting areas of AI development is Edge AI, which involves processing data at the edge of the network, rather than in the cloud. This approach offers numerous benefits, such as reduced latency, improved security, and decreased reliance on expensive cloud resources. + +However, the implementation of Edge AI is not without its technical challenges. For instance, one major hurdle is the need for powerful edge devices capable of handling the data processing requirements of Edge AI. These devices must be able to handle real-time data analysis and decision-making, which can be quite demanding. + +Another challenge lies in the complexity of deploying and managing multiple edge devices. In a traditional cloud-based model, this is relatively straightforward, but with Edge AI, it becomes a much more complicated task. Developers need to ensure that these devices are properly synchronized and that data is consistently distributed across them. + +Despite these challenges, Edge AI has the potential to revolutionize the way we interact with technology. 
It offers the opportunity to create more intelligent, autonomous systems that can operate in real-time, without the need for constant communication with a centralized server. With the right approach and technical solutions, we can overcome these challenges and unlock the true power of Edge AI.",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-06-30 12:00:47,2025-06-30 12:00:47.766,[],/posts/ghostai1/801640113264437,82,"{'language': 'en', 'probability': 0.9224224090576172}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61929226ded356549e20c5da/ONUjP2S5fUWd07BiFXm0i.jpeg,874.0,Sergio Paniego,sergiopaniego,945882073606324,"[{'type': 'text', 'value': '📣 CALL FOR CONTRIBUTORS! 📣', 'raw': '📣 CALL FOR CONTRIBUTORS! 📣'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Following last week’s full release of Gemma 3n, we launched a dedicated recipes repo to explore and share use cases. We already added some! 🧑\u200d🍳', 'raw': 'Following last week’s full release of Gemma 3n, we launched a dedicated recipes repo to explore and share use cases. We already added some! 🧑\u200d🍳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Now we’re inviting the community to contribute and showcase how these models shine! ✨', 'raw': 'Now we’re inviting the community to contribute and showcase how these models shine! ✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Let them cook.', 'raw': 'Let them cook.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out: ', 'raw': 'Check it out: '}, {'type': 'link', 'href': 'https://github.com/huggingface/huggingface-gemma-recipes/issues/4', 'raw': 'https://github.com/huggingface/huggingface-gemma-recipes/issues/4'}, {'type': 'new_line', 'raw': '\n'}]","📣 CALL FOR CONTRIBUTORS! 📣 + +Following last week’s full release of Gemma 3n, we launched a dedicated recipes repo to explore and share use cases. We already added some! 🧑‍🍳 + +Now we’re inviting the community to contribute and showcase how these models shine! ✨ + +Let them cook. + +Check it out: https://github.com/huggingface/huggingface-gemma-recipes/issues/4 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61929226ded356549e20c5da/OCTmM5bluYa9cGuWG0uLV.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666'], 'count': 1}]",2025-06-30 11:05:04,2025-06-30 11:05:04.792,[],/posts/sergiopaniego/945882073606324,120,"{'language': 'en', 'probability': 0.8726992011070251}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,587280854326828,"[{'type': 'text', 'value': 'so many multimodal releases these days 🤠', 'raw': 'so many multimodal releases these days 🤠'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> ERNIE-4.5-VL: new vision language MoE models by Baidu ', 'raw': '> ERNIE-4.5-VL: new vision language MoE models by Baidu '}, {'type': 'link', 'href': 'https://huggingface.co/models?search=ernie-4.5-vl', 'raw': 'https://huggingface.co/models?search=ernie-4.5-vl'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> new visual document retrievers by NVIDIA (sota on ViDoRe!) ', 'raw': '> new visual document retrievers by NVIDIA (sota on ViDoRe!) 
'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nvidia/llama-nemoretriever-colembed-3b-v1'}, 'url': 'https://huggingface.co/nvidia/llama-nemoretriever-colembed-3b-v1', 'raw': 'https://huggingface.co/nvidia/llama-nemoretriever-colembed-3b-v1'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nvidia/llama-nemoretriever-colembed-1b-v1'}, 'url': 'https://huggingface.co/nvidia/llama-nemoretriever-colembed-1b-v1', 'raw': 'https://huggingface.co/nvidia/llama-nemoretriever-colembed-1b-v1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Ovis-3b: new image-text in image-text out models by Alibaba ⤵️ ', 'raw': '> Ovis-3b: new image-text in image-text out models by Alibaba ⤵️ '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/AIDC-AI/Ovis-U1-', 'raw': 'https://huggingface.co/spaces/AIDC-AI/Ovis-U1-'}]","so many multimodal releases these days 🤠 +> ERNIE-4.5-VL: new vision language MoE models by Baidu https://huggingface.co/models?search=ernie-4.5-vl +> new visual document retrievers by NVIDIA (sota on ViDoRe!) https://huggingface.co/nvidia/llama-nemoretriever-colembed-3b-v1 https://huggingface.co/nvidia/llama-nemoretriever-colembed-1b-v1 +> Ovis-3b: new image-text in image-text out models by Alibaba ⤵️ https://huggingface.co/spaces/AIDC-AI/Ovis-U1-","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/w8nBpv-nsGYDq_jY0z8oT.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'nieche', 'liamtoran'], 'count': 3}]",2025-06-30 10:43:15,2025-06-30 10:43:28.259,[],/posts/merve/587280854326828,625,"{'language': 'en', 'probability': 0.5757843852043152}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,106480718165942,"[{'type': 'text', 'value': 'The demo for Camel-Doc-OCR-062825 (exp) is optimized for document retrieval and direct Markdown (.md) generation from images and PDFs. Additional demos include OCRFlux-3B (document OCR), VilaSR (spatial reasoning with visual drawing), and ShotVL (cinematic language understanding). 🐪', 'raw': 'The demo for Camel-Doc-OCR-062825 (exp) is optimized for document retrieval and direct Markdown (.md) generation from images and PDFs. Additional demos include OCRFlux-3B (document OCR), VilaSR (spatial reasoning with visual drawing), and ShotVL (cinematic language understanding). 
🐪'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✦ Space : ', 'raw': '✦ Space : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/Doc-VLMs-v2-Localization'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/Doc-VLMs-v2-Localization', 'raw': 'https://huggingface.co/spaces/prithivMLmods/Doc-VLMs-v2-Localization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Models :', 'raw': 'Models :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ camel-doc-ocr-062825 : ', 'raw': '⤷ camel-doc-ocr-062825 : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Camel-Doc-OCR-062825'}, 'url': 'https://huggingface.co/prithivMLmods/Camel-Doc-OCR-062825', 'raw': 'https://huggingface.co/prithivMLmods/Camel-Doc-OCR-062825'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ ocrflux-3b : ', 'raw': '⤷ ocrflux-3b : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ChatDOC/OCRFlux-3B'}, 'url': 'https://huggingface.co/ChatDOC/OCRFlux-3B', 'raw': 'https://huggingface.co/ChatDOC/OCRFlux-3B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ vilasr : ', 'raw': '⤷ vilasr : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'AntResearchNLP/ViLaSR'}, 'url': 'https://huggingface.co/AntResearchNLP/ViLaSR', 'raw': 'https://huggingface.co/AntResearchNLP/ViLaSR'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ shotvl : ', 'raw': '⤷ shotvl : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Vchitect/ShotVL-7B'}, 'url': 'https://huggingface.co/Vchitect/ShotVL-7B', 'raw': 'https://huggingface.co/Vchitect/ShotVL-7B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ Multimodal Implementations : ', 'raw': '⤷ Multimodal Implementations : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0'}, 'url': 'https://huggingface.co/collections/prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0', 'raw': 'https://huggingface.co/collections/prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The community GPU grant was given by Hugging Face — special thanks to them. This space supports the following tasks: (image inference, video inference) with result markdown canvas and object detection/localization. 🤗🚀', 'raw': 'The community GPU grant was given by Hugging Face — special thanks to them. This space supports the following tasks: (image inference, video inference) with result markdown canvas and object detection/localization. 🤗🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To know more about it, visit the model card of the respective model. !!', 'raw': 'To know more about it, visit the model card of the respective model. 
!!'}]","The demo for Camel-Doc-OCR-062825 (exp) is optimized for document retrieval and direct Markdown (.md) generation from images and PDFs. Additional demos include OCRFlux-3B (document OCR), VilaSR (spatial reasoning with visual drawing), and ShotVL (cinematic language understanding). 🐪 + +✦ Space : https://huggingface.co/spaces/prithivMLmods/Doc-VLMs-v2-Localization + +Models : +⤷ camel-doc-ocr-062825 : https://huggingface.co/prithivMLmods/Camel-Doc-OCR-062825 +⤷ ocrflux-3b : https://huggingface.co/ChatDOC/OCRFlux-3B +⤷ vilasr : https://huggingface.co/AntResearchNLP/ViLaSR +⤷ shotvl : https://huggingface.co/Vchitect/ShotVL-7B + +⤷ Multimodal Implementations : https://huggingface.co/collections/prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0 + +The community GPU grant was given by Hugging Face — special thanks to them. This space supports the following tasks: (image inference, video inference) with result markdown canvas and object detection/localization. 🤗🚀 + +. +. +. +To know more about it, visit the model card of the respective model. !!","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/FW7bj1AoQD6vDYir7X-7N.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/uYLwTErX4Zo0r3VjwgOTr.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/eIZk-5tbCxWHeybN5q7-r.mp4'}]",[],"[{'reaction': '🤗', 'users': ['prithivMLmods', 'John6666'], 'count': 2}, {'reaction': '❤️', 'users': ['prithivMLmods', 'zhang123123'], 'count': 2}, {'reaction': '👀', 'users': ['prithivMLmods'], 'count': 1}]",2025-06-30 09:30:09,2025-06-30 11:50:38.653,[],/posts/prithivMLmods/106480718165942,826,"{'language': 'en', 'probability': 0.6653190851211548}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/657566a76da136b50faaa48c/EvXVCEchiFsUiuLefhWsT.png,34.0,Yeonseok Kim,yeonseok-zeticai,894951796717498,"[{'type': 'text', 'value': '🚀 Real-Time On-Device AI Agent with Polaris-4B — Run It Yourself, No Cloud, No Cost', 'raw': '🚀 Real-Time On-Device AI Agent with Polaris-4B — Run It Yourself, No Cloud, No Cost'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We just deployed a real-time on-device AI agent using the Polaris-4B-Preview model — one of the top-performing <6B open LLMs on Hugging Face.', 'raw': 'We just deployed a real-time on-device AI agent using the Polaris-4B-Preview model — one of the top-performing <6B open LLMs on Hugging Face.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📱 What’s remarkable?', 'raw': '📱 What’s remarkable?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This model runs entirely on a mobile device, without cloud, and without any manual optimization. It was built using ZETIC.MLange, and the best part?', 'raw': 'This model runs entirely on a mobile device, without cloud, and without any manual optimization. 
It was built using ZETIC.MLange, and the best part?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ It’s totally automated, free to use, and anyone can do it.', 'raw': '➡️ It’s totally automated, free to use, and anyone can do it.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You don’t need to write deployment code, tweak backends, or touch device-specific SDKs. Just upload your model — and ZETIC.MLange handles the rest.', 'raw': 'You don’t need to write deployment code, tweak backends, or touch device-specific SDKs. Just upload your model — and ZETIC.MLange handles the rest.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 About the Model', 'raw': '🧠 About the Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Model: Polaris-4B-Preview', 'raw': '- Model: Polaris-4B-Preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Size: ~4B parameters', 'raw': '- Size: ~4B parameters'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Ranking: Top 3 on Hugging Face LLM Leaderboard (<6B)', 'raw': '- Ranking: Top 3 on Hugging Face LLM Leaderboard (<6B)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Tokenizer: Token-incremental inference supported', 'raw': '- Tokenizer: Token-incremental inference supported'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Modifications: None — stock weights, just optimized for mobile', 'raw': '- Modifications: None — stock weights, just optimized for mobile'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙️ What ZETIC.MLange Does', 'raw': '⚙️ What ZETIC.MLange Does'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ZETIC.MLange is a fully automated deployment framework for On-Device AI, built for AI engineers who want to focus on models — not infrastructure.', 'raw': ' ZETIC.MLange is a fully automated deployment framework for On-Device AI, built for AI engineers who want to focus on models — not infrastructure.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here’s what it does in minutes:', 'raw': 'Here’s what it does in minutes:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 📊 Analyzes model structure', 'raw': '- 📊 Analyzes model structure'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ⚙️ Converts to mobile-optimized format (e.g., GGUF, ONNX)', 'raw': '- ⚙️ Converts to mobile-optimized format (e.g., GGUF, ONNX)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 📦 Generates a runnable runtime environment with pre/post-processing', 'raw': '- 📦 Generates a runnable runtime environment with pre/post-processing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 📱 Targets real mobile hardware (CPU, GPU, NPU — including Qualcomm, MediaTek, Apple)', 'raw': '- 📱 Targets real mobile hardware (CPU, GPU, NPU — including Qualcomm, MediaTek, Apple)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🎯 Gives you a downloadable SDK or mobile app component — ready to run', 'raw': '- 🎯 Gives you a downloadable SDK or mobile app component — ready to run'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And yes — this is available now, for free, at ', 'raw': 'And yes — this is available now, for free, at '}, {'type': 'link', 'href': 
'https://mlange.zetic.ai', 'raw': 'https://mlange.zetic.ai'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧪 For AI Engineers Like You, If you want to:', 'raw': '🧪 For AI Engineers Like You, If you want to:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Test LLMs directly on-device', 'raw': '- Test LLMs directly on-device'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Run models offline with no latency', 'raw': '- Run models offline with no latency'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Avoid cloud GPU costs', 'raw': '- Avoid cloud GPU costs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Deploy to mobile without writing app-side inference code', 'raw': '- Deploy to mobile without writing app-side inference code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then this is your moment. You can do exactly what we did, using your own models — all in a few clicks.', 'raw': 'Then this is your moment. You can do exactly what we did, using your own models — all in a few clicks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Start here → ', 'raw': '🎯 Start here → '}, {'type': 'link', 'href': 'https://mlange.zetic.ai', 'raw': 'https://mlange.zetic.ai'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📬 Want to try Polaris-4B on your own app? contact@zetic.ai, or just visit ', 'raw': '📬 Want to try Polaris-4B on your own app? contact@zetic.ai, or just visit '}, {'type': 'link', 'href': 'https://mlange.zetic.ai', 'raw': 'https://mlange.zetic.ai'}, {'type': 'text', 'value': ' , it is opened as free!', 'raw': ' , it is opened as free!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Great work ', 'raw': 'Great work '}, {'type': 'mention', 'user': 'Chancy', 'raw': '@Chancy'}, {'type': 'text', 'value': ', ', 'raw': ', '}, {'type': 'mention', 'user': 'Zhihui', 'raw': '@Zhihui'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'mention', 'user': 'tobiaslee', 'raw': '@tobiaslee'}, {'type': 'text', 'value': ' !', 'raw': ' !'}, {'type': 'new_line', 'raw': '\n'}]","🚀 Real-Time On-Device AI Agent with Polaris-4B — Run It Yourself, No Cloud, No Cost + +We just deployed a real-time on-device AI agent using the Polaris-4B-Preview model — one of the top-performing <6B open LLMs on Hugging Face. + +📱 What’s remarkable? +This model runs entirely on a mobile device, without cloud, and without any manual optimization. It was built using ZETIC.MLange, and the best part? + +➡️ It’s totally automated, free to use, and anyone can do it. +You don’t need to write deployment code, tweak backends, or touch device-specific SDKs. Just upload your model — and ZETIC.MLange handles the rest. + +🧠 About the Model +- Model: Polaris-4B-Preview +- Size: ~4B parameters +- Ranking: Top 3 on Hugging Face LLM Leaderboard (<6B) +- Tokenizer: Token-incremental inference supported +- Modifications: None — stock weights, just optimized for mobile + +⚙️ What ZETIC.MLange Does + ZETIC.MLange is a fully automated deployment framework for On-Device AI, built for AI engineers who want to focus on models — not infrastructure. 
+ +Here’s what it does in minutes: +- 📊 Analyzes model structure +- ⚙️ Converts to mobile-optimized format (e.g., GGUF, ONNX) +- 📦 Generates a runnable runtime environment with pre/post-processing +- 📱 Targets real mobile hardware (CPU, GPU, NPU — including Qualcomm, MediaTek, Apple) +- 🎯 Gives you a downloadable SDK or mobile app component — ready to run +And yes — this is available now, for free, at https://mlange.zetic.ai + +🧪 For AI Engineers Like You, If you want to: +- Test LLMs directly on-device +- Run models offline with no latency +- Avoid cloud GPU costs +- Deploy to mobile without writing app-side inference code + +Then this is your moment. You can do exactly what we did, using your own models — all in a few clicks. + +🎯 Start here → https://mlange.zetic.ai + +📬 Want to try Polaris-4B on your own app? contact@zetic.ai, or just visit https://mlange.zetic.ai , it is opened as free! + +Great work @Chancy, @Zhihui , @tobiaslee ! +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657566a76da136b50faaa48c/FdxjGJcJHeRLKtmLWJkXR.qt'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657566a76da136b50faaa48c/SlVvtYHiRg_UOdfddEHiH.qt'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657566a76da136b50faaa48c/DyHIpmJNVqdQ5qfP1tkGQ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657566a76da136b50faaa48c/lfX9-xp6RN3rNdU3mlBH-.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657566a76da136b50faaa48c/8aBJnCa2zNh5bbos-vY9u.png'}]","[{'_id': '64acb321264bbbf171a2b040', 'avatarUrl': '/avatars/0ad344c0e9b1e3fda469932f91d117dc.svg', 'fullname': 'Chenxin An', 'name': 'Chancy', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7}, {'_id': '6038d6d0612f5eef3cc05ea9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6038d6d0612f5eef3cc05ea9/ryhvAX5djQpD5OrIlZQ1f.jpeg', 'fullname': 'Lei Li', 'name': 'tobiaslee', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 16}, {'_id': '622f103fc78da4c7ebd7c887', 'avatarUrl': '/avatars/b0c7cd29835d92c2cd584947fcd5d520.svg', 'fullname': 'Xie', 'name': 'Zhihui', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 11}]","[{'reaction': '👀', 'users': ['yeonseok-zeticai', 'John6666', 'pedromcf', 'S01aris', 'AtAndDev', 'andreaschandra'], 'count': 6}, {'reaction': '🔥', 'users': ['yeonseok-zeticai', 'pcuenq'], 'count': 2}, {'reaction': '🚀', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '❤️', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '🧠', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '🤝', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '👍', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '🤗', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '����', 'users': ['yeonseok-zeticai'], 'count': 1}]",2025-06-25 16:38:37,2025-06-25 16:39:33.867,[],/posts/yeonseok-zeticai/894951796717498,3097,"{'language': 'en', 'probability': 0.831764817237854}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/mrB2AFA3K9g3gJz-WxxjH.jpeg,2.0,kanaria007,kanaria007,119667181576110,"[{'type': 'text', 'value': '✅ New Article on Hugging Face: Teaching AI to Remember Meaningfully — Not Just Store Tokens', 'raw': '✅ New Article on Hugging Face: Teaching AI to Remember Meaningfully — Not 
Just Store Tokens'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Title:', 'raw': 'Title:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Understanding the Memory-Loop Protocol: Structured Memory and Reflective Learning', 'raw': '🧠 Understanding the Memory-Loop Protocol: Structured Memory and Reflective Learning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Read the article here: ', 'raw': '🔗 Read the article here: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/kanaria007/understanding-the-memory-loop-protocol', 'raw': 'https://huggingface.co/blog/kanaria007/understanding-the-memory-loop-protocol'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Summary:', 'raw': 'Summary:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Following the Ethics Interface Protocol — which enabled models to reason with moral awareness — this new article introduces the Memory-Loop Protocol, a system for embedding *reflective memory structures* into AI systems.', 'raw': 'Following the Ethics Interface Protocol — which enabled models to reason with moral awareness — this new article introduces the Memory-Loop Protocol, a system for embedding *reflective memory structures* into AI systems.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Most models forget their own thought processes. Even when they “repeat” ideas, they don’t know why. This protocol changes that.', 'raw': 'Most models forget their own thought processes. Even when they “repeat” ideas, they don’t know why. This protocol changes that.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instead of expanding context windows or storing raw logs, the Memory-Loop Protocol teaches AI systems to:', 'raw': 'Instead of expanding context windows or storing raw logs, the Memory-Loop Protocol teaches AI systems to:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Identify recurring reasoning patterns', 'raw': '• Identify recurring reasoning patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Reflect on *why* a loop occurred — and if it was productive', 'raw': '• Reflect on *why* a loop occurred — and if it was productive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Compress meaningful loops into reusable templates', 'raw': '• Compress meaningful loops into reusable templates'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Discard reasoning paths that caused contradiction or stagnation', 'raw': '• Discard reasoning paths that caused contradiction or stagnation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This isn’t just retention — it’s **structural memory with reflective compression**.', 'raw': 'This isn’t just retention — it’s **structural memory with reflective compression**.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The protocol enables:', 'raw': 'The protocol enables:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Pattern-based memory indexing', 'raw': '• Pattern-based memory indexing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Loop-trigger diagnostics and 
trace encoding', 'raw': '• Loop-trigger diagnostics and trace encoding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Meta-cognitive principles for reuse', 'raw': '• Meta-cognitive principles for reuse'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Forgetting directives for cognitive pruning', 'raw': '• Forgetting directives for cognitive pruning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Seamless integration with models like GPT-4o, Claude, Gemini', 'raw': '• Seamless integration with models like GPT-4o, Claude, Gemini'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Resources:', 'raw': 'Resources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🧠 Protocol Dataset: ', 'raw': '• 🧠 Protocol Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'kanaria007/agi-structural-intelligence-protocols'}, 'url': 'https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols', 'raw': 'https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 📑 Included: Loop trace encoders, compression macros, semantic loss detection, guided forgetting protocol', 'raw': '• 📑 Included: Loop trace encoders, compression macros, semantic loss detection, guided forgetting protocol'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Relevant for:', 'raw': 'Relevant for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Developers building memory-aware AI', 'raw': '• Developers building memory-aware AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Cognitive architecture researchers', 'raw': '• Cognitive architecture researchers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Meta-cognition and self-reflection modeling', 'raw': '• Meta-cognition and self-reflection modeling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Anyone exploring how AI can *learn from experience structurally*', 'raw': '• Anyone exploring how AI can *learn from experience structurally*'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is not about making AI remember more —', 'raw': 'This is not about making AI remember more —'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It’s about teaching AI to remember *intelligently, structurally, and meaningfully*.', 'raw': 'It’s about teaching AI to remember *intelligently, structurally, and meaningfully*.'}]","✅ New Article on Hugging Face: Teaching AI to Remember Meaningfully — Not Just Store Tokens + +Title: +🧠 Understanding the Memory-Loop Protocol: Structured Memory and Reflective Learning +🔗 Read the article here: https://huggingface.co/blog/kanaria007/understanding-the-memory-loop-protocol + +Summary: +Following the Ethics Interface Protocol — which enabled models to reason with moral awareness — this new article introduces the Memory-Loop Protocol, a system for embedding *reflective memory structures* into AI systems. + +Most models forget their own thought processes. Even when they “repeat” ideas, they don’t know why. This protocol changes that. 
+ +Instead of expanding context windows or storing raw logs, the Memory-Loop Protocol teaches AI systems to: + +• Identify recurring reasoning patterns +• Reflect on *why* a loop occurred — and if it was productive +• Compress meaningful loops into reusable templates +• Discard reasoning paths that caused contradiction or stagnation + +This isn’t just retention — it’s **structural memory with reflective compression**. + +The protocol enables: + +• Pattern-based memory indexing +• Loop-trigger diagnostics and trace encoding +• Meta-cognitive principles for reuse +• Forgetting directives for cognitive pruning +• Seamless integration with models like GPT-4o, Claude, Gemini + +Resources: +• 🧠 Protocol Dataset: https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols + +• 📑 Included: Loop trace encoders, compression macros, semantic loss detection, guided forgetting protocol + +Relevant for: +• Developers building memory-aware AI +• Cognitive architecture researchers +• Meta-cognition and self-reflection modeling +• Anyone exploring how AI can *learn from experience structurally* + +This is not about making AI remember more — +It’s about teaching AI to remember *intelligently, structurally, and meaningfully*.",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-06-25 13:48:17,2025-06-25 13:48:17.643,[],/posts/kanaria007/119667181576110,226,"{'language': 'en', 'probability': 0.7903667092323303}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620ec53f7c959335c0c65e4b/zYyp99Ufu5HAKFfzLvz95.png,87.0,Francisco Aranda,frascuchon,689415817219194,"[{'type': 'text', 'value': 'Extended Dataset with Sheets 🚀', 'raw': 'Extended Dataset with Sheets 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I used Sheets to extend the fka/awesome-chatgpt-prompts dataset with a single prompt 💡. Check out the result: ', 'raw': 'I used Sheets to extend the fka/awesome-chatgpt-prompts dataset with a single prompt 💡. Check out the result: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'frascuchon/extended_fka_awesome_chatgpt_prompts'}, 'url': 'https://huggingface.co/datasets/frascuchon/extended_fka_awesome_chatgpt_prompts', 'raw': 'https://huggingface.co/datasets/frascuchon/extended_fka_awesome_chatgpt_prompts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try Sheets to expand your datasets: ', 'raw': 'Try Sheets to expand your datasets: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'aisheets/sheets'}, 'url': 'https://huggingface.co/spaces/aisheets/sheets', 'raw': 'https://huggingface.co/spaces/aisheets/sheets'}, {'type': 'text', 'value': ' 🛠️', 'raw': ' 🛠️'}]","Extended Dataset with Sheets 🚀 + +I used Sheets to extend the fka/awesome-chatgpt-prompts dataset with a single prompt 💡. 
Check out the result: https://huggingface.co/datasets/frascuchon/extended_fka_awesome_chatgpt_prompts + +Try Sheets to expand your datasets: https://huggingface.co/spaces/aisheets/sheets 🛠️",[],[],"[{'reaction': '🚀', 'users': ['John6666', 'dvilasuero'], 'count': 2}]",2025-06-25 13:24:08,2025-06-25 13:24:08.595,[],/posts/frascuchon/689415817219194,2299,"{'language': 'en', 'probability': 0.6223888993263245}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1654278567459-626a9bfa03e2e2796f24ca11.jpeg,295.0,Freddy Boulton,freddyaboulton,509614991477853,"[{'type': 'text', 'value': 'The new ', 'raw': 'The new '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'multimodalart/self-forcing'}, 'url': 'https://huggingface.co/spaces/multimodalart/self-forcing', 'raw': 'https://huggingface.co/spaces/multimodalart/self-forcing'}, {'type': 'text', 'value': ' model and demo are truly impressive!', 'raw': ' model and demo are truly impressive!'}]",The new https://huggingface.co/spaces/multimodalart/self-forcing model and demo are truly impressive!,"[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626a9bfa03e2e2796f24ca11/94W-WxW0Xw90zz7kcHj7P.mp4'}]",[],"[{'reaction': '🔥', 'users': ['BrigitteTousi', 'John6666', 'Tehom'], 'count': 3}]",2025-06-25 12:55:48,2025-06-25 12:55:48.389,[],/posts/freddyaboulton/509614991477853,3176,"{'language': 'en', 'probability': 0.8688749670982361}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cVIfbrAiZy8mSd6bp-W0k.jpeg,23.0,Adrien Gallouët,angt,195625992085357,"[{'type': 'text', 'value': 'Just published: Nano-vLLM meets Inference Endpoints', 'raw': 'Just published: Nano-vLLM meets Inference Endpoints'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I show how to bind Nano-vLLM (supporting Qwen3-0.6B) to a web service — and deploy it easily on Hugging Face Inference Endpoints.', 'raw': 'I show how to bind Nano-vLLM (supporting Qwen3-0.6B) to a web service — and deploy it easily on Hugging Face Inference Endpoints.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Minimalist engine, maximum fun!', 'raw': 'Minimalist engine, maximum fun!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/angt/nano-vllm-meets-inference-endpoints', 'raw': 'https://huggingface.co/blog/angt/nano-vllm-meets-inference-endpoints'}, {'type': 'new_line', 'raw': '\n'}]","Just published: Nano-vLLM meets Inference Endpoints + +I show how to bind Nano-vLLM (supporting Qwen3-0.6B) to a web service — and deploy it easily on Hugging Face Inference Endpoints. + +Minimalist engine, maximum fun! 
+ +https://huggingface.co/blog/angt/nano-vllm-meets-inference-endpoints +",[],[],"[{'reaction': '🔥', 'users': ['John6666'], 'count': 1}, {'reaction': '🤗', 'users': ['John6666'], 'count': 1}]",2025-06-25 09:08:42,2025-06-25 09:08:42.082,[],/posts/angt/195625992085357,253,"{'language': 'en', 'probability': 0.7149595618247986}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png,30.0,GHOSTAI,ghostai1,489990465918905,"[{'type': 'text', 'value': '# Artificial Intelligence applications in education: A Deep Dive', 'raw': '# Artificial Intelligence applications in education: A Deep Dive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Artificial Intelligence has undoubtedly become an integral part of our lives, and education is no exception. The bespectacled nerd in your classroom is not just for show anymore. With advancements in AI-driven technology, education has undergone a transformation. We now have smart assistants, enhanced learning aids, and self-correcting solutions that are revolutionizing the way we learn.', 'raw': 'Artificial Intelligence has undoubtedly become an integral part of our lives, and education is no exception. The bespectacled nerd in your classroom is not just for show anymore. With advancements in AI-driven technology, education has undergone a transformation. We now have smart assistants, enhanced learning aids, and self-correcting solutions that are revolutionizing the way we learn.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In the realm of education, AI has proven to be a game-changer, providing personalized learning experiences to each student. With AI-driven technology, students can now learn at their own pace, focusing on their strengths and weaknesses. AI-powered tutors are becoming increasingly popular, as they intelligently analyze a student's progress and provide guidance tailored to their needs."", 'raw': ""In the realm of education, AI has proven to be a game-changer, providing personalized learning experiences to each student. With AI-driven technology, students can now learn at their own pace, focusing on their strengths and weaknesses. AI-powered tutors are becoming increasingly popular, as they intelligently analyze a student's progress and provide guidance tailored to their needs.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Moreover, AI-driven technology is helping educators by automating routine tasks such as grading and administrative work, giving them more time to focus on teaching. The integration of AI in the classroom is also making learning more interactive and engaging. Virtual labs, adaptive quizzes, and 'twin AI' systems, which simulate students' progress, are proving to be extremely beneficial in enhancing learning outcomes."", 'raw': ""Moreover, AI-driven technology is helping educators by automating routine tasks such as grading and administrative work, giving them more time to focus on teaching. The integration of AI in the classroom is also making learning more interactive and engaging. 
Virtual labs, adaptive quizzes, and 'twin AI' systems, which simulate students' progress, are proving to be extremely beneficial in enhancing learning outcomes.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In conclusion, the applications of AI in education are vast, and the potential for growth is immense. AI is not only transforming the way we learn but also making education more equitable and accessible. The future of AI in education is bright, and we await with bated breath to see what new developments it will bring.', 'raw': 'In conclusion, the applications of AI in education are vast, and the potential for growth is immense. AI is not only transforming the way we learn but also making education more equitable and accessible. The future of AI in education is bright, and we await with bated breath to see what new developments it will bring.'}]","# Artificial Intelligence applications in education: A Deep Dive + +Artificial Intelligence has undoubtedly become an integral part of our lives, and education is no exception. The bespectacled nerd in your classroom is not just for show anymore. With advancements in AI-driven technology, education has undergone a transformation. We now have smart assistants, enhanced learning aids, and self-correcting solutions that are revolutionizing the way we learn. + +In the realm of education, AI has proven to be a game-changer, providing personalized learning experiences to each student. With AI-driven technology, students can now learn at their own pace, focusing on their strengths and weaknesses. AI-powered tutors are becoming increasingly popular, as they intelligently analyze a student's progress and provide guidance tailored to their needs. + +Moreover, AI-driven technology is helping educators by automating routine tasks such as grading and administrative work, giving them more time to focus on teaching. The integration of AI in the classroom is also making learning more interactive and engaging. Virtual labs, adaptive quizzes, and 'twin AI' systems, which simulate students' progress, are proving to be extremely beneficial in enhancing learning outcomes. + +In conclusion, the applications of AI in education are vast, and the potential for growth is immense. AI is not only transforming the way we learn but also making education more equitable and accessible. The future of AI in education is bright, and we await with bated breath to see what new developments it will bring.",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-06-25 06:49:12,2025-06-25 06:49:12.310,[],/posts/ghostai1/489990465918905,239,"{'language': 'en', 'probability': 0.9645358324050903}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64be41c330a1f0f0f0a1e0eb/8W-z1xMGnQGQ2M4LXnZfy.jpeg,94.0,VORTEX,Abhaykoul,997219525730173,"[{'type': 'text', 'value': ""Introducing Dhanishtha 2.0: World's first Intermediate Thinking Model"", 'raw': ""Introducing Dhanishtha 2.0: World's first Intermediate Thinking Model""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Dhanishtha 2.0 is the world's first LLM designed to think between the responses. Unlike other Reasoning LLMs, which think just once."", 'raw': ""Dhanishtha 2.0 is the world's first LLM designed to think between the responses. 
Unlike other Reasoning LLMs, which think just once.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dhanishtha can think, rethink, self-evaluate, and refine in between responses using multiple blocks.', 'raw': 'Dhanishtha can think, rethink, self-evaluate, and refine in between responses using multiple blocks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This technique makes it highly token-efficient: it uses up to 79% fewer tokens than DeepSeek R1', 'raw': 'This technique makes it highly token-efficient: it uses up to 79% fewer tokens than DeepSeek R1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '---', 'raw': '---'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can try our model from: ', 'raw': 'You can try our model from: '}, {'type': 'link', 'href': 'https://helpingai.co/chat', 'raw': 'https://helpingai.co/chat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Also, we're gonna Open-Source Dhanishtha on July 1st."", 'raw': ""Also, we're gonna Open-Source Dhanishtha on July 1st.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '---', 'raw': '---'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For Devs:', 'raw': 'For Devs:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔑 Get your API key at ', 'raw': '🔑 Get your API key at '}, {'type': 'link', 'href': 'https://helpingai.co/dashboard', 'raw': 'https://helpingai.co/dashboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'from HelpingAI import HAI # pip install HelpingAI==1.1.1\nfrom rich import print\n\nhai = HAI(api_key=""hl-***********************"")\n\nresponse = hai.chat.completions.create(\n model=""Dhanishtha-2.0-preview"",\n messages=[{""role"": ""user"", ""content"": ""What is the value of ∫0∞𝑥3/𝑥−1𝑑𝑥 ?""}],\n stream=True,\n hide_think=False # Hide or show models thinking\n)\n\nfor chunk in response:\n print(chunk.choices[0].delta.content, end="""", flush=True)', 'raw': '```\nfrom HelpingAI import HAI # pip install HelpingAI==1.1.1\nfrom rich import print\n\nhai = HAI(api_key=""hl-***********************"")\n\nresponse = hai.chat.completions.create(\n model=""Dhanishtha-2.0-preview"",\n messages=[{""role"": ""user"", ""content"": ""What is the value of ∫0∞𝑥3/𝑥−1𝑑𝑥 ?""}],\n stream=True,\n hide_think=False # Hide or show models thinking\n)\n\nfor chunk in response:\n print(chunk.choices[0].delta.content, end="""", flush=True)\n```'}]","Introducing Dhanishtha 2.0: World's first Intermediate Thinking Model + +Dhanishtha 2.0 is the world's first LLM designed to think between the responses. Unlike other Reasoning LLMs, which think just once. + +Dhanishtha can think, rethink, self-evaluate, and refine in between responses using multiple blocks. +This technique makes it highly token-efficient: it uses up to 79% fewer tokens than DeepSeek R1 +--- + +You can try our model from: https://helpingai.co/chat +Also, we're gonna Open-Source Dhanishtha on July 1st. 
+ +--- +For Devs: +🔑 Get your API key at https://helpingai.co/dashboard +``` +from HelpingAI import HAI # pip install HelpingAI==1.1.1 +from rich import print + +hai = HAI(api_key=""hl-***********************"") + +response = hai.chat.completions.create( + model=""Dhanishtha-2.0-preview"", + messages=[{""role"": ""user"", ""content"": ""What is the value of ∫0∞𝑥3/𝑥−1𝑑𝑥 ?""}], + stream=True, + hide_think=False # Hide or show models thinking +) + +for chunk in response: + print(chunk.choices[0].delta.content, end="""", flush=True) +```",[],[],"[{'reaction': '🔥', 'users': ['Abhaykoul', 'AIMaster7', 'VarunGuptaPy', 'Lewdiculous', 'KingNish', 'John6666', '9voltfan2009', 'pretermodernist', 'Lokesh-CODER', 'UnstableLlama', 'JohnRoger', 'natalie5', 'MalikIbrar', 'drwlf'], 'count': 14}, {'reaction': '👍', 'users': ['ajayhindujaswiss', 'Abhaykoul', 'Parveshiiii', 'VarunGuptaPy', 'KingNish', '9voltfan2009', 'Embedded-Engineering'], 'count': 7}]",2025-06-25 04:36:59,2025-06-26 13:56:59.295,"[{'_id': '64be41c330a1f0f0f0a1e0eb', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64be41c330a1f0f0f0a1e0eb/8W-z1xMGnQGQ2M4LXnZfy.jpeg', 'fullname': 'VORTEX', 'name': 'Abhaykoul', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 94, 'isFollowing': False}, {'_id': '6457c91433df199f4e5d5814', 'avatarUrl': '/avatars/15819e209a1aca2c7b18bfdad6facf3f.svg', 'fullname': 'Lokesh', 'name': 'Lokesh-CODER', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/Abhaykoul/997219525730173,4007,"{'language': 'en', 'probability': 0.83376544713974}",2 +/avatars/7e5b4b94d80d405026261fd723a4d1e8.svg,39.0,Dhruv,dhruv3006,516065137585895,"[{'type': 'text', 'value': 'WebBench: A real-world benchmark for Browser Agents', 'raw': 'WebBench: A real-world benchmark for Browser Agents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'WebBench is an open, task-oriented benchmark designed to measure how effectively browser agents handle complex, realistic web workflows. It includes 2,454 tasks across 452 live websites selected from the global top-1000 by traffic.', 'raw': 'WebBench is an open, task-oriented benchmark designed to measure how effectively browser agents handle complex, realistic web workflows. It includes 2,454 tasks across 452 live websites selected from the global top-1000 by traffic.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Github : ', 'raw': 'Github : '}, {'type': 'link', 'href': 'https://github.com/Halluminate/WebBench', 'raw': 'https://github.com/Halluminate/WebBench'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","WebBench: A real-world benchmark for Browser Agents + +WebBench is an open, task-oriented benchmark designed to measure how effectively browser agents handle complex, realistic web workflows. It includes 2,454 tasks across 452 live websites selected from the global top-1000 by traffic. 
+ +Github : https://github.com/Halluminate/WebBench + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66db0c02ba8010cc014ea962/wvRxa-bKylxefPzFOLXce.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-06-25 02:24:25,2025-06-25 02:24:25.594,[],/posts/dhruv3006/516065137585895,218,"{'language': 'en', 'probability': 0.8654242753982544}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a813dedbb9e28866a91b27/2fknEF_u6StSjp3uUF144.png,41.0,appvoid,appvoid,125456320825765,"[{'type': 'text', 'value': 'have you ever wanted to quickly prototype an idea with a language model but get intimidated by the whole setup? no issues! now you can try building a custom one from scratch!', 'raw': 'have you ever wanted to quickly prototype an idea with a language model but get intimidated by the whole setup? no issues! now you can try building a custom one from scratch!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'beware, it might be addictive once you learn how it works: ', 'raw': 'beware, it might be addictive once you learn how it works: '}, {'type': 'link', 'href': 'https://nohak.pythonanywhere.com/', 'raw': 'https://nohak.pythonanywhere.com/'}]","have you ever wanted to quickly prototype an idea with a language model but get intimidated by the whole setup? no issues! now you can try building a custom one from scratch! + + +beware, it might be addictive once you learn how it works: https://nohak.pythonanywhere.com/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62a813dedbb9e28866a91b27/icDvx1rD766yEIRpm8U2a.png'}]",[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-06-25 01:40:48,2025-06-25 01:40:48.353,[],/posts/appvoid/125456320825765,204,"{'language': 'en', 'probability': 0.938521683216095}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65282ea6d0c969f265fa73cf/NYkbSWQIXje012gY947jk.png,15.0,Ben F,BFFree,190836494252067,"[{'type': 'text', 'value': 'Working on some chess set concepts. I went towards minimal sculpted shapes then returned to some traditionalism. ', 'raw': 'Working on some chess set concepts. I went towards minimal sculpted shapes then returned to some traditionalism. '}]",Working on some chess set concepts. I went towards minimal sculpted shapes then returned to some traditionalism. 
,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65282ea6d0c969f265fa73cf/SWmhG39T7VR-XRh__swi4.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65282ea6d0c969f265fa73cf/yEy8RQVVXwk9Qcqr6BAVH.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65282ea6d0c969f265fa73cf/2PS8qD-4sXQ33c7F8rcN_.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65282ea6d0c969f265fa73cf/SPN-iVTCjRh2GlZhixFuK.png'}]",[],"[{'reaction': '👍', 'users': ['JLouisBiz', 'sprakhar778', 'Lewdiculous', 'John6666', 'Dev9124'], 'count': 5}, {'reaction': '😔', 'users': ['takeraparterer'], 'count': 1}]",2025-06-24 19:20:52,2025-06-24 19:20:52.517,[],/posts/BFFree/190836494252067,2678,"{'language': 'en', 'probability': 0.9751266241073608}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/66c8dc99951843ca6762fe02/yagkY9dY7_-qw8hAAPWiK.png,93.0,Rebekah Bogdanoff,DualityAI-RebekahBogdanoff,352866416610611,"[{'type': 'text', 'value': ""🤔 Ready to build better AI models with synthetic data, but don't know where to start? Why go at it alone?💡"", 'raw': ""🤔 Ready to build better AI models with synthetic data, but don't know where to start? Why go at it alone?💡""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👋 Join Duality AI’s Falcon community! It is one of the best resources for support, creativity, and growth as you move along your synthetic data journey. ', 'raw': '👋 Join Duality AI’s Falcon community! It is one of the best resources for support, creativity, and growth as you move along your synthetic data journey. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️Our current Kaggle competition is the easiest way to get started: ', 'raw': '➡️Our current Kaggle competition is the easiest way to get started: '}, {'type': 'link', 'href': 'https://www.kaggle.com/competitions/multi-instance-object-detection-challenge', 'raw': 'https://www.kaggle.com/competitions/multi-instance-object-detection-challenge'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""When you join, you'll meet some of the rising stars in our community, such as:"", 'raw': ""When you join, you'll meet some of the rising stars in our community, such as:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟 Sergio Sanz, ', 'raw': '🌟 Sergio Sanz, '}, {'type': 'mention', 'user': 'sergio-sanz-rodriguez', 'raw': '@sergio-sanz-rodriguez'}, {'type': 'text', 'value': ' , who took 1st and 2nd place in recent computer vision Kaggle competitions and shared his process of using R-CNN and Falcon-generated images in this article: ', 'raw': ' , who took 1st and 2nd place in recent computer vision Kaggle competitions and shared his process of using R-CNN and Falcon-generated images in this article: '}, {'type': 'link', 'href': 'https://www.duality.ai/blog/leveraging-synthetic-data-for-real-world-object-detection', 'raw': 'https://www.duality.ai/blog/leveraging-synthetic-data-for-real-world-object-detection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟Mohana pavan Bezawada, ', 'raw': '🌟Mohana pavan Bezawada, '}, {'type': 'mention', 'user': 'mohanapavan', 'raw': '@mohanapavan'}, {'type': 'text', 'value': ', who has risen in the ranks from the top 25 in the first competition all the 
way to top scorer in our current competition! His journey illustrates how dedication + Falcon can take you far in your AI journey.', 'raw': ', who has risen in the ranks from the top 25 in the first competition all the way to top scorer in our current competition! His journey illustrates how dedication + Falcon can take you far in your AI journey.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟Nadia TRIKI, who delivered top-tier results in two of our recent Kaggle competitions and shared a detailed breakdown of her strategy - showcasing a deep command of AI training workflows and a commitment to helping others succeed. ', 'raw': '🌟Nadia TRIKI, who delivered top-tier results in two of our recent Kaggle competitions and shared a detailed breakdown of her strategy - showcasing a deep command of AI training workflows and a commitment to helping others succeed. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ángel Jacinto Sánchez Ruiz, ', 'raw': 'Ángel Jacinto Sánchez Ruiz, '}, {'type': 'mention', 'user': 'Sacus', 'raw': '@Sacus'}, {'type': 'text', 'value': ' , who mastered FalconCloud to create targeted, high-performance datasets and provided crucial feedback and product requests that improved the data not only for him but for all of the current competitors. ', 'raw': ' , who mastered FalconCloud to create targeted, high-performance datasets and provided crucial feedback and product requests that improved the data not only for him but for all of the current competitors. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤩 Join our community today to partner with these super stars, and many more!', 'raw': '🤩 Join our community today to partner with these super stars, and many more!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Sign up for a free Falcon subscription here: ', 'raw': '➡️ Sign up for a free Falcon subscription here: '}, {'type': 'link', 'href': 'https://www.duality.ai/edu', 'raw': 'https://www.duality.ai/edu'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️Join our Kaggle competition for hands on learning and a chance to win rewards and recognition! ', 'raw': '➡️Join our Kaggle competition for hands on learning and a chance to win rewards and recognition! '}, {'type': 'link', 'href': 'https://www.kaggle.com/competitions/multi-instance-object-detection-challenge', 'raw': 'https://www.kaggle.com/competitions/multi-instance-object-detection-challenge'}]","🤔 Ready to build better AI models with synthetic data, but don't know where to start? Why go at it alone?💡 + +👋 Join Duality AI’s Falcon community! It is one of the best resources for support, creativity, and growth as you move along your synthetic data journey. 
+ +➡️Our current Kaggle competition is the easiest way to get started: https://www.kaggle.com/competitions/multi-instance-object-detection-challenge + +When you join, you'll meet some of the rising stars in our community, such as: + +🌟 Sergio Sanz, @sergio-sanz-rodriguez , who took 1st and 2nd place in recent computer vision Kaggle competitions and shared his process of using R-CNN and Falcon-generated images in this article: https://www.duality.ai/blog/leveraging-synthetic-data-for-real-world-object-detection + +🌟Mohana pavan Bezawada, @mohanapavan, who has risen in the ranks from the top 25 in the first competition all the way to top scorer in our current competition! His journey illustrates how dedication + Falcon can take you far in your AI journey. + +🌟Nadia TRIKI, who delivered top-tier results in two of our recent Kaggle competitions and shared a detailed breakdown of her strategy - showcasing a deep command of AI training workflows and a commitment to helping others succeed. + +Ángel Jacinto Sánchez Ruiz, @Sacus , who mastered FalconCloud to create targeted, high-performance datasets and provided crucial feedback and product requests that improved the data not only for him but for all of the current competitors. + +🤩 Join our community today to partner with these super stars, and many more! + +➡️ Sign up for a free Falcon subscription here: https://www.duality.ai/edu +➡️Join our Kaggle competition for hands on learning and a chance to win rewards and recognition! https://www.kaggle.com/competitions/multi-instance-object-detection-challenge","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66c8dc99951843ca6762fe02/oGb-pn9vpFWxSsaSIR4Ot.png'}]","[{'_id': '67bc954115ebb48955ca7d4d', 'avatarUrl': '/avatars/1d62cf1eaeca65a8c5ec3b03a4885109.svg', 'fullname': 'mohana pavan bezwada', 'name': 'mohanapavan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}, {'_id': '65294e953efa1da5dde72c28', 'avatarUrl': '/avatars/d75e2373fc0b25946d256eb7376b13b4.svg', 'fullname': 'Angel Sanchez Ruiz', 'name': 'Sacus', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}, {'_id': '6613ff77fe5e52cbade7ff03', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6613ff77fe5e52cbade7ff03/sA1G2U5NaDlf3vy8u-xeL.jpeg', 'fullname': 'Sergio Sanz-Rodriguez', 'name': 'sergio-sanz-rodriguez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}]","[{'reaction': '🔥', 'users': ['DualityAI-RebekahBogdanoff', 'RishiJ17', 'John6666', 'robb-0'], 'count': 4}, {'reaction': '🤗', 'users': ['DualityAI-RebekahBogdanoff', 'RishiJ17'], 'count': 2}]",2025-06-20 22:15:48,2025-06-26 16:10:08.828,"[{'_id': '683861adfdfd0cef3ef9fd7f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/683861adfdfd0cef3ef9fd7f/d6gmkoEX-BdOtny63-Yyu.jpeg', 'fullname': 'Irasubiza Viateur', 'name': 'VIATEUR-AI', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '66c8dc99951843ca6762fe02', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66c8dc99951843ca6762fe02/yagkY9dY7_-qw8hAAPWiK.png', 'fullname': 'Rebekah Bogdanoff', 'name': 'DualityAI-RebekahBogdanoff', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 93, 'isFollowing': 
False}]",/posts/DualityAI-RebekahBogdanoff/352866416610611,2515,"{'language': 'en', 'probability': 0.9092299342155457}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/6825fa752c4f59748f89f112/OFVt0wb_AxHGxJtFEZS8s.png,4.0,Masa,ZacMasa5000,671663175914167,"[{'type': 'text', 'value': ""Hey builders! I created a no-code tool to scrape real-time Twitter data into JSON for LLMs and agents. What are the top use cases you're working on where live social data helps?"", 'raw': ""Hey builders! I created a no-code tool to scrape real-time Twitter data into JSON for LLMs and agents. What are the top use cases you're working on where live social data helps?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MasaFoundation/X-Twitter-Scraper'}, 'url': 'https://huggingface.co/spaces/MasaFoundation/X-Twitter-Scraper', 'raw': 'https://huggingface.co/spaces/MasaFoundation/X-Twitter-Scraper'}]","Hey builders! I created a no-code tool to scrape real-time Twitter data into JSON for LLMs and agents. What are the top use cases you're working on where live social data helps? + +https://huggingface.co/spaces/MasaFoundation/X-Twitter-Scraper",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-06-20 19:44:43,2025-06-20 19:44:43.388,[],/posts/ZacMasa5000/671663175914167,242,"{'language': 'en', 'probability': 0.8836880922317505}",0 +/avatars/ead5364faf178250e4464f31aab6966f.svg,8.0,Home Intelligent System,brainhome,128948549150065,"[{'type': 'text', 'value': 'Trinity-Synthesis: A Multi-Agent Architecture for AI Agents That Think Before They Speak', 'raw': 'Trinity-Synthesis: A Multi-Agent Architecture for AI Agents That Think Before They Speak'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ever felt your AI agent is ""shooting from the hip""? It latches onto a single line of thought and fails to produce a robust, well-rounded plan. This is a common struggle I\'ve called the ""AI Reasoning Paradox.""', 'raw': 'Ever felt your AI agent is ""shooting from the hip""? It latches onto a single line of thought and fails to produce a robust, well-rounded plan. This is a common struggle I\'ve called the ""AI Reasoning Paradox.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To tackle this, I developed Trinity-Synthesis, a multi-agent architecture designed to force reflection and synthesis before delivering a final answer. The philosophy is simple: constructive conflict between different perspectives leads to better solutions.', 'raw': 'To tackle this, I developed Trinity-Synthesis, a multi-agent architecture designed to force reflection and synthesis before delivering a final answer. 
The philosophy is simple: constructive conflict between different perspectives leads to better solutions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here’s the core idea:', 'raw': 'Here’s the core idea:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instead of one agent, it uses four agents running on the same base model but with different ""personalities"" defined by their system prompts and temperature settings:', 'raw': 'Instead of one agent, it uses four agents running on the same base model but with different ""personalities"" defined by their system prompts and temperature settings:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 The Visionary: Thinks outside the box (high temp: 1.0).', 'raw': '🧠 The Visionary: Thinks outside the box (high temp: 1.0).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 The Analyst: Focuses on logic, data, and structure (low temp: 0.3).', 'raw': '📊 The Analyst: Focuses on logic, data, and structure (low temp: 0.3).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ The Pragmatist: Evaluates feasibility, costs, and risks (mid temp: 0.5).', 'raw': '🛠️ The Pragmatist: Evaluates feasibility, costs, and risks (mid temp: 0.5).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'These three ""thinkers"" work in parallel on the same problem. Then, a final Synthesizer agent critically analyzes their outputs, rejects flawed arguments, and integrates the best points into a single, coherent, and often superior strategy.', 'raw': 'These three ""thinkers"" work in parallel on the same problem. Then, a final Synthesizer agent critically analyzes their outputs, rejects flawed arguments, and integrates the best points into a single, coherent, and often superior strategy.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The result is a more robust reasoning process that balances creativity with analytical rigor, making it ideal for solving complex, strategic problems where answer quality is critical.', 'raw': 'The result is a more robust reasoning process that balances creativity with analytical rigor, making it ideal for solving complex, strategic problems where answer quality is critical.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I\'ve written a deep dive on how it works, including a detailed case study (""The Helios Initiative"") and the Python source code for you to experiment with.', 'raw': 'I\'ve written a deep dive on how it works, including a detailed case study (""The Helios Initiative"") and the Python source code for you to experiment with.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the full article on Medium:', 'raw': 'Read the full article on Medium:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://medium.com/@brainhome9/trinity-synthesis-how-i-built-an-ai-agent-that-thinks-before-it-speaks-d45d45c2827c', 'raw': 'https://medium.com/@brainhome9/trinity-synthesis-how-i-built-an-ai-agent-that-thinks-before-it-speaks-d45d45c2827c'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'd love to hear your feedback and see what you build with it!"", 'raw': ""I'd love to hear your feedback and see what you build with 
it!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #AIAgents #LLM #Reasoning #MultiAgent ', 'raw': '#AI #AIAgents #LLM #Reasoning #MultiAgent '}]","Trinity-Synthesis: A Multi-Agent Architecture for AI Agents That Think Before They Speak +Ever felt your AI agent is ""shooting from the hip""? It latches onto a single line of thought and fails to produce a robust, well-rounded plan. This is a common struggle I've called the ""AI Reasoning Paradox."" + +To tackle this, I developed Trinity-Synthesis, a multi-agent architecture designed to force reflection and synthesis before delivering a final answer. The philosophy is simple: constructive conflict between different perspectives leads to better solutions. + +Here’s the core idea: + +Instead of one agent, it uses four agents running on the same base model but with different ""personalities"" defined by their system prompts and temperature settings: + +🧠 The Visionary: Thinks outside the box (high temp: 1.0). +📊 The Analyst: Focuses on logic, data, and structure (low temp: 0.3). +🛠️ The Pragmatist: Evaluates feasibility, costs, and risks (mid temp: 0.5). +These three ""thinkers"" work in parallel on the same problem. Then, a final Synthesizer agent critically analyzes their outputs, rejects flawed arguments, and integrates the best points into a single, coherent, and often superior strategy. + +The result is a more robust reasoning process that balances creativity with analytical rigor, making it ideal for solving complex, strategic problems where answer quality is critical. + +I've written a deep dive on how it works, including a detailed case study (""The Helios Initiative"") and the Python source code for you to experiment with. + +Read the full article on Medium: +https://medium.com/@brainhome9/trinity-synthesis-how-i-built-an-ai-agent-that-thinks-before-it-speaks-d45d45c2827c + +I'd love to hear your feedback and see what you build with it! 
+ +#AI #AIAgents #LLM #Reasoning #MultiAgent ",[],[],"[{'reaction': '👀', 'users': ['John6666', 'AtAndDev', 'drwlf'], 'count': 3}, {'reaction': '👍', 'users': ['JLouisBiz', 'AtAndDev'], 'count': 2}]",2025-06-20 17:35:01,2025-06-21 12:36:29.240,"[{'_id': '64b846b625e5f8a3e0e07c67', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64b846b625e5f8a3e0e07c67/510CUVpRJAmzsvAoAxrHM.jpeg', 'fullname': 'AYDIN KULAN', 'name': 'IIIWhiteWolfIII', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '5e767e4f3d77a72421292d05', 'avatarUrl': '/avatars/ead5364faf178250e4464f31aab6966f.svg', 'fullname': 'Home Intelligent System', 'name': 'brainhome', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}]",/posts/brainhome/128948549150065,1728,"{'language': 'en', 'probability': 0.8988749384880066}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/jm8yXMDvGKLcnpn_fUlfz.png,,phamducanh,phamducanhhh,546330932868610,"[{'type': 'text', 'value': 'hi, why i can not use Deepsite v2 to develop my project anymore, tks ', 'raw': 'hi, why i can not use Deepsite v2 to develop my project anymore, tks '}]","hi, why i can not use Deepsite v2 to develop my project anymore, tks ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6852d12c3095f288307851e7/mePg-t79P72YxzXcGm1BH.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-06-20 15:29:18,2025-06-21 05:40:38.745,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/phamducanhhh/546330932868610,224,"{'language': 'en', 'probability': 0.8042855262756348}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,300457326273979,"[{'type': 'text', 'value': ""y'all have been asking my opinion on how OCR models compare to each other 👀"", 'raw': ""y'all have been asking my opinion on how OCR models compare to each other 👀""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I will leave three apps to compare newest models by ', 'raw': 'I will leave three apps to compare newest models by '}, {'type': 'mention', 'user': 'prithivMLmods', 'raw': '@prithivMLmods'}, {'type': 'text', 'value': ' instead ⤵️', 'raw': ' instead ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> compare Nanonets-OCR-s, Qwen2-VL-OCR-2B-Instruct, RolmOCR, Aya-Vision ', 'raw': '> compare Nanonets-OCR-s, Qwen2-VL-OCR-2B-Instruct, RolmOCR, Aya-Vision '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/Multimodal-OCR'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR', 'raw': 'https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> SmolDocling, Nanonets-OCR-s, MonkeyOCR, Typhoon-OCR-7B ', 'raw': '> SmolDocling, Nanonets-OCR-s, MonkeyOCR, Typhoon-OCR-7B '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/Multimodal-OCR2'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR2', 'raw': 
'https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> docscopeOCR, MonkeyOCR, coreOCR ', 'raw': '> docscopeOCR, MonkeyOCR, coreOCR '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/core-OCR'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/core-OCR', 'raw': 'https://huggingface.co/spaces/prithivMLmods/core-OCR'}]","y'all have been asking my opinion on how OCR models compare to each other 👀 +I will leave three apps to compare newest models by @prithivMLmods instead ⤵️ +> compare Nanonets-OCR-s, Qwen2-VL-OCR-2B-Instruct, RolmOCR, Aya-Vision https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR +> SmolDocling, Nanonets-OCR-s, MonkeyOCR, Typhoon-OCR-7B https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR2 +> docscopeOCR, MonkeyOCR, coreOCR https://huggingface.co/spaces/prithivMLmods/core-OCR",[],"[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957}]","[{'reaction': '🚀', 'users': ['OmerKuru', 'John6666', 'rwightman', 'subk', 'denniskeller', 'Csplk', 'johnlockejrr', 'openfree', 'bvtilt', 'joseph-bou'], 'count': 10}]",2025-06-20 12:44:12,2025-06-24 00:41:01.837,"[{'_id': '601173801032ecb700eba704', 'avatarUrl': '/avatars/5a0627cded64c72d3ebaefb543a66a0b.svg', 'fullname': 'nwzjk', 'name': 'nwzjk', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/merve/300457326273979,2254,"{'language': 'en', 'probability': 0.7738863229751587}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/657566a76da136b50faaa48c/EvXVCEchiFsUiuLefhWsT.png,34.0,Yeonseok Kim,yeonseok-zeticai,509797148011121,"[{'type': 'text', 'value': '🚀 Try deep-research without Network Connection and Data Leak, Jan-Nano production-ready on-device AI in 6 hours! ', 'raw': '🚀 Try deep-research without Network Connection and Data Leak, Jan-Nano production-ready on-device AI in 6 hours! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': "" Jan-Nano has been making waves as one of HuggingFace's most trending 4B parameter models, outperforming even 671B models on SimpleQA benchmarks. But here's what changes everything: ZETIC.MLange just transformed Jan-Nano into a blazing-fast on-device AI solution."", 'raw': "" Jan-Nano has been making waves as one of HuggingFace's most trending 4B parameter models, outperforming even 671B models on SimpleQA benchmarks. 
But here's what changes everything: ZETIC.MLange just transformed Jan-Nano into a blazing-fast on-device AI solution.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 6-hour deployment from huggingface to a production-ready library!', 'raw': '✨ 6-hour deployment from huggingface to a production-ready library!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Zero cloud dependency - complete privacy and offline capability', 'raw': ' Zero cloud dependency - complete privacy and offline capability'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""While others struggle with complex on-device deployments taking weeks or months, ZETIC.MLange's automated pipeline makes it effortless. No manual optimization, no vendor-specific coding, no compromise on performance."", 'raw': ""While others struggle with complex on-device deployments taking weeks or months, ZETIC.MLange's automated pipeline makes it effortless. No manual optimization, no vendor-specific coding, no compromise on performance.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📱 Ready to transform your AI models? Try ZETIC.MLange; it is totally free now! ', 'raw': '📱 Ready to transform your AI models? Try ZETIC.MLange; it is totally free now! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The future of AI is on-device. Make it happen in hours, not months.', 'raw': 'The future of AI is on-device. Make it happen in hours, not months.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#OnDeviceAI #EdgeAI #MLOptimization #NPU #PrivacyFirst', 'raw': '#OnDeviceAI #EdgeAI #MLOptimization #NPU #PrivacyFirst'}]","🚀 Try deep-research without Network Connection and Data Leak, Jan-Nano production-ready on-device AI in 6 hours! + + Jan-Nano has been making waves as one of HuggingFace's most trending 4B parameter models, outperforming even 671B models on SimpleQA benchmarks. But here's what changes everything: ZETIC.MLange just transformed Jan-Nano into a blazing-fast on-device AI solution. + +✨ 6-hour deployment from huggingface to a production-ready library! + + Zero cloud dependency - complete privacy and offline capability +While others struggle with complex on-device deployments taking weeks or months, ZETIC.MLange's automated pipeline makes it effortless. No manual optimization, no vendor-specific coding, no compromise on performance. + +📱 Ready to transform your AI models? Try ZETIC.MLange; it is totally free now! + +The future of AI is on-device. Make it happen in hours, not months. 
+ +#OnDeviceAI #EdgeAI #MLOptimization #NPU #PrivacyFirst","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657566a76da136b50faaa48c/L9j9PYSLZNin_6OHIAAQx.qt'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657566a76da136b50faaa48c/9XgPVv-q_0hoWEdxs3KCM.png'}]",[],"[{'reaction': '🔥', 'users': ['yeonseok-zeticai', 'juanignacio97', 'AtAndDev'], 'count': 3}, {'reaction': '👀', 'users': ['yeonseok-zeticai', 'Wladastic'], 'count': 2}, {'reaction': '🚀', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '🧠', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '🤗', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '👍', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '🤝', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '😎', 'users': ['yeonseok-zeticai'], 'count': 1}, {'reaction': '➕', 'users': ['yeonseok-zeticai'], 'count': 1}]",2025-06-20 11:17:30,2025-06-23 01:27:03.308,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '646ba0d4c7f672003c851ed2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646ba0d4c7f672003c851ed2/sQ6GX_MyH4KOQgi85sR5W.png', 'fullname': 'Wladastic', 'name': 'Wladastic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}, {'_id': '657566a76da136b50faaa48c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/657566a76da136b50faaa48c/EvXVCEchiFsUiuLefhWsT.png', 'fullname': 'Yeonseok Kim', 'name': 'yeonseok-zeticai', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 34, 'isFollowing': False}]",/posts/yeonseok-zeticai/509797148011121,1387,"{'language': 'en', 'probability': 0.8500382900238037}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/6051e59531c5be7f3dd5ebc9/iW1huuI60224DPBzn2cki.jpeg,141.0,Giada Pistilli,giadap,887797954746272,"[{'type': 'text', 'value': '🗣️ Whose voice do we hear when AI speaks?', 'raw': '🗣️ Whose voice do we hear when AI speaks?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Every language carries its own cultural values and worldviews. So, when we build AI systems, we're not just deciding how they speak but also whose perspectives they represent."", 'raw': ""Every language carries its own cultural values and worldviews. So, when we build AI systems, we're not just deciding how they speak but also whose perspectives they represent.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Even choosing which dialect to train on in Norway becomes a question of inclusion and power. In Kenya, will AI speak Swahili from Nairobi or coastal regions? What about indigenous languages with rich oral traditions but limited written text, like Quechua in Peru or Cherokee in North America?', 'raw': 'Even choosing which dialect to train on in Norway becomes a question of inclusion and power. In Kenya, will AI speak Swahili from Nairobi or coastal regions? 
What about indigenous languages with rich oral traditions but limited written text, like Quechua in Peru or Cherokee in North America?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The path forward? Building WITH communities, not just FOR them. Working with local partners (libraries, universities, civil society), testing for cultural alignment, and asking hard questions about representation.', 'raw': 'The path forward? Building WITH communities, not just FOR them. Working with local partners (libraries, universities, civil society), testing for cultural alignment, and asking hard questions about representation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just published some thoughts on this after my keynote in Norway a few weeks ago: ', 'raw': 'Just published some thoughts on this after my keynote in Norway a few weeks ago: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/giadap/when-ai-speaks', 'raw': 'https://huggingface.co/blog/giadap/when-ai-speaks'}]","🗣️ Whose voice do we hear when AI speaks? + +Every language carries its own cultural values and worldviews. So, when we build AI systems, we're not just deciding how they speak but also whose perspectives they represent. + +Even choosing which dialect to train on in Norway becomes a question of inclusion and power. In Kenya, will AI speak Swahili from Nairobi or coastal regions? What about indigenous languages with rich oral traditions but limited written text, like Quechua in Peru or Cherokee in North America? + +The path forward? Building WITH communities, not just FOR them. Working with local partners (libraries, universities, civil society), testing for cultural alignment, and asking hard questions about representation. + +Just published some thoughts on this after my keynote in Norway a few weeks ago: https://huggingface.co/blog/giadap/when-ai-speaks",[],[],"[{'reaction': '❤️', 'users': ['GloriaMK', 'John6666', 'K-ai-Innovations', 'jeffboudier', 'MrFuture86', 'evijit'], 'count': 6}, {'reaction': '🔥', 'users': ['JLouisBiz'], 'count': 1}]",2025-06-20 09:45:08,2025-06-21 19:26:08.279,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/giadap/887797954746272,1865,"{'language': 'en', 'probability': 0.9347947239875793}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,142374005596149,"[{'type': 'text', 'value': ""The demo for smoldocling / nanonets ocr / typhoon ocr / monkey ocr explores the document OCR capabilities of various newly released multimodal VLMs in a single space. And if you're experiencing or demoing long document image OCR, kindly use the Smoldocling 256M preview [ Smoldocling is back in demo here. ] 🤗. "", 'raw': ""The demo for smoldocling / nanonets ocr / typhoon ocr / monkey ocr explores the document OCR capabilities of various newly released multimodal VLMs in a single space. And if you're experiencing or demoing long document image OCR, kindly use the Smoldocling 256M preview [ Smoldocling is back in demo here. ] 🤗. 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✦ Try the demo here : ', 'raw': '✦ Try the demo here : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/Multimodal-OCR2'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR2', 'raw': 'https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ MonkeyOCR Recognition : ', 'raw': '⤷ MonkeyOCR Recognition : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'echo840/MonkeyOCR'}, 'url': 'https://huggingface.co/echo840/MonkeyOCR/tree/main/Recognition', 'raw': 'https://huggingface.co/echo840/MonkeyOCR/tree/main/Recognition'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ Nanonets-OCR-s : ', 'raw': '⤷ Nanonets-OCR-s : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nanonets/Nanonets-OCR-s'}, 'url': 'https://huggingface.co/nanonets/Nanonets-OCR-s', 'raw': 'https://huggingface.co/nanonets/Nanonets-OCR-s'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ SmolDocling-256M-preview : ', 'raw': '⤷ SmolDocling-256M-preview : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ds4sd/SmolDocling-256M-preview'}, 'url': 'https://huggingface.co/ds4sd/SmolDocling-256M-preview', 'raw': 'https://huggingface.co/ds4sd/SmolDocling-256M-preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ typhoon-ocr-7b : ', 'raw': '⤷ typhoon-ocr-7b : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'scb10x/typhoon-ocr-7b'}, 'url': 'https://huggingface.co/scb10x/typhoon-ocr-7b', 'raw': 'https://huggingface.co/scb10x/typhoon-ocr-7b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ Multimodal Implementations : ', 'raw': '⤷ Multimodal Implementations : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0'}, 'url': 'https://huggingface.co/collections/prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0', 'raw': 'https://huggingface.co/collections/prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⤷ Github : ', 'raw': '⤷ Github : '}, {'type': 'link', 'href': 'https://github.com/PRITHIVSAKTHIUR/Multimodal-OCR2', 'raw': 'https://github.com/PRITHIVSAKTHIUR/Multimodal-OCR2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The community GPU grant was given by Hugging Face — special thanks to them. 🤗🚀', 'raw': 'The community GPU grant was given by Hugging Face — special thanks to them. 🤗🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To know more about it, visit the model card of the respective model. !!', 'raw': 'To know more about it, visit the model card of the respective model. 
!!'}]","The demo for smoldocling / nanonets ocr / typhoon ocr / monkey ocr explores the document OCR capabilities of various newly released multimodal VLMs in a single space. And if you're experiencing or demoing long document image OCR, kindly use the Smoldocling 256M preview [ Smoldocling is back in demo here. ] 🤗. + +✦ Try the demo here : https://huggingface.co/spaces/prithivMLmods/Multimodal-OCR2 + +⤷ MonkeyOCR Recognition : https://huggingface.co/echo840/MonkeyOCR/tree/main/Recognition +⤷ Nanonets-OCR-s : https://huggingface.co/nanonets/Nanonets-OCR-s +⤷ SmolDocling-256M-preview : https://huggingface.co/ds4sd/SmolDocling-256M-preview +⤷ typhoon-ocr-7b : https://huggingface.co/scb10x/typhoon-ocr-7b + +⤷ Multimodal Implementations : https://huggingface.co/collections/prithivMLmods/multimodal-implementations-67c9982ea04b39f0608badb0 + +⤷ Github : https://github.com/PRITHIVSAKTHIUR/Multimodal-OCR2 + + +The community GPU grant was given by Hugging Face — special thanks to them. 🤗🚀 + + + +To know more about it, visit the model card of the respective model. !!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/S3yCiwgi8LQIRz7DFTcCe.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/0owQf1N9Jmu0p5DagYdZ_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/r9OJCCdaBHyfByu9lpNuD.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/r6uwZh32e5XsZwCRQk5Qj.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/Z0wjrPOp4_enODOkpiVVl.mp4'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'prithivMLmods', 'merve'], 'count': 3}, {'reaction': '🤗', 'users': ['prithivMLmods', 'merve'], 'count': 2}, {'reaction': '🚀', 'users': ['prithivMLmods', 'merve'], 'count': 2}, {'reaction': '👍', 'users': ['Parma7876', 'prithivMLmods'], 'count': 2}]",2025-06-20 07:44:13,2025-06-22 04:37:19.578,"[{'_id': '664b1e48f604081903070d5b', 'avatarUrl': '/avatars/a8336a8cdc8e156e417513d5726148df.svg', 'fullname': 'Muche', 'name': 'Glider95', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957, 'isFollowing': False}]",/posts/prithivMLmods/142374005596149,1827,"{'language': 'en', 'probability': 0.7009366154670715}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6615494716917dfdc645c44e/-qgGeUlEsf5z-jilWawyO.png,17.0,Daniel Fox,FlameF0X,246825684905159,"[{'type': 'text', 'value': 'Hello there!', 'raw': 'Hello there!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I just find out that all the SnowflakeCore-G0 series are Mask Language Models instead of LLM's."", 'raw': ""I just find out that all the SnowflakeCore-G0 series are Mask Language Models instead of LLM's.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The development of SnowflakeCore-G0-Releas-3 would be delayed even more.', 'raw': 'The development of 
SnowflakeCore-G0-Releas-3 would be delayed even more.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edit: I officially end the development of SnowflakeCore-G0 and start the development of SnowflakeCore-G1, which SHOULD be the text generator.', 'raw': 'Edit: I officially end the development of SnowflakeCore-G0 and start the development of SnowflakeCore-G1, which SHOULD be the text generator.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edit-2: After some evaluation of the code, the models are actually Text Generators. So the development of G0 will continue.', 'raw': 'Edit-2: After some evaluation of the code, the models are actually Text Generators. So the development of G0 will continue.'}]","Hello there! +I just find out that all the SnowflakeCore-G0 series are Mask Language Models instead of LLM's. +The development of SnowflakeCore-G0-Releas-3 would be delayed even more. + +Edit: I officially end the development of SnowflakeCore-G0 and start the development of SnowflakeCore-G1, which SHOULD be the text generator. + +Edit-2: After some evaluation of the code, the models are actually Text Generators. So the development of G0 will continue.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'nqzfaizal77ai'], 'count': 2}]",2025-06-20 04:28:16,2025-06-20 08:52:11.618,[],/posts/FlameF0X/246825684905159,1142,"{'language': 'en', 'probability': 0.8442424535751343}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png,30.0,GHOSTAI,ghostai1,617587754203582,"[{'type': 'text', 'value': '# Computer Vision advancements: A Deep Dive', 'raw': '# Computer Vision advancements: A Deep Dive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The world of AI-driven Computer Vision has been making strides at an unprecedented rate, and the advancements are nothing short of astounding. These Computer Vision systems are powered by deep learning algorithms, which have the ability to recognize and understand images and videos in a way that's closer to human perception. This technology has found its way into various applications, from self-driving cars to security systems, and is set to redefine our daily lives."", 'raw': ""The world of AI-driven Computer Vision has been making strides at an unprecedented rate, and the advancements are nothing short of astounding. These Computer Vision systems are powered by deep learning algorithms, which have the ability to recognize and understand images and videos in a way that's closer to human perception. This technology has found its way into various applications, from self-driving cars to security systems, and is set to redefine our daily lives.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'One of the most significant advancements in AI-driven Computer Vision is the rise of deep learning algorithms. 
These algorithms can learn from vast amounts of data, and have the ability to recognize patterns and make decisions based on that information. With the help of these algorithms, Computer Vision systems can now detect objects and people in real time, making it an invaluable tool in security systems and traffic management.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Another remarkable application of AI-driven Computer Vision is in the field of healthcare. By using this technology to analyze medical images, doctors can now detect and diagnose diseases at an early stage, leading to better treatment outcomes. Furthermore, advancements in this field have also opened up new possibilities in the world of robotics and assistive technologies, allowing people with disabilities to live more independently.', 'raw': 'Another remarkable application of AI-driven Computer Vision is in the field of healthcare. By using this technology to analyze medical images, doctors can now detect and diagnose diseases at an early stage, leading to better treatment outcomes. Furthermore, advancements in this field have also opened up new possibilities in the world of robotics and assistive technologies, allowing people with disabilities to live more independently.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It\'s clear that AI-driven Computer Vision is revolutionizing the world as we know it. From self-driving cars to healthcare, this technology is set to change the way we live and work. As we continue to make strides in this field, the possibilities are endless. So, get ready to embrace the future of vision-driven AI, as it\'s here to stay!', 'raw': 'It\'s clear that AI-driven Computer Vision is revolutionizing the world as we know it. From self-driving cars to healthcare, this technology is set to change the way we live and work. As we continue to make strides in this field, the possibilities are endless. So, get ready to embrace the future of vision-driven AI, as it\'s here to stay!'}]","# Computer Vision advancements: A Deep Dive + +The world of AI-driven Computer Vision has been making strides at an unprecedented rate, and the advancements are nothing short of astounding. These Computer Vision systems are powered by deep learning algorithms, which have the ability to recognize and understand images and videos in a way that's closer to human perception. This technology has found its way into various applications, from self-driving cars to security systems, and is set to redefine our daily lives. + +One of the most significant advancements in AI-driven Computer Vision is the rise of deep learning algorithms. These algorithms can learn from vast amounts of data, and have the ability to recognize patterns and make decisions based on that information. With the help of these algorithms, Computer Vision systems can now detect objects and people in real time, making it an invaluable tool in security systems and traffic management. + +Another remarkable application of AI-driven Computer Vision is in the field of healthcare. By using this technology to analyze medical images, doctors can now detect and diagnose diseases at an early stage, leading to better treatment outcomes. Furthermore, advancements in this field have also opened up new possibilities in the world of robotics and assistive technologies, allowing people with disabilities to live more independently. 
+ +It's clear that AI-driven Computer Vision is revolutionizing the world as we know it. From self-driving cars to healthcare, this technology is set to change the way we live and work. As we continue to make strides in this field, the possibilities are endless. So, get ready to embrace the future of vision-driven AI, as it's here to stay!",[],[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-06-20 01:37:52,2025-06-20 01:37:52.727,[],/posts/ghostai1/617587754203582,241,"{'language': 'en', 'probability': 0.9579517841339111}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png,30.0,GHOSTAI,ghostai1,697794117804055,"[{'type': 'text', 'value': '# Edge AI and scalability issues: The Next Frontier', 'raw': '# Edge AI and scalability issues: The Next Frontier'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In recent years, the advancement of Artificial Intelligence (AI) has been nothing short of extraordinary. With Edge AI, we have brought computing power to the very edge of the network, transforming the way we interact with data. However, one of the biggest challenges that have emerged with the rise of AI is its scalability.', 'raw': 'In recent years, the advancement of Artificial Intelligence (AI) has been nothing short of extraordinary. With Edge AI, we have brought computing power to the very edge of the network, transforming the way we interact with data. However, one of the biggest challenges that have emerged with the rise of AI is its scalability.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edge AI requires powerful processors to handle the complex algorithms that drive AI-driven applications. As these networks grow and more devices join the Edge AI ecosystem, the demand for processing power skyrockets. This puts a considerable strain on the infrastructure and can lead to performance issues, latency, and data loss.', 'raw': 'Edge AI requires powerful processors to handle the complex algorithms that drive AI-driven applications. As these networks grow and more devices join the Edge AI ecosystem, the demand for processing power skyrockets. This puts a considerable strain on the infrastructure and can lead to performance issues, latency, and data loss.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To address this issue, AI researchers and developers are exploring new ways to optimize their algorithms for scalability. This involves identifying bottlenecks in the system and developing new techniques to maximize the use of available resources. One approach is to use machine learning (ML) algorithms to train models that can adapt to changing conditions and to the available resources.', 'raw': 'To address this issue, AI researchers and developers are exploring new ways to optimize their algorithms for scalability. This involves identifying bottlenecks in the system and developing new techniques to maximize the use of available resources. One approach is to use machine learning (ML) algorithms to train models that can adapt to changing conditions and to the available resources.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Another promising solution is to leverage the power of distributed computing. 
By breaking down AI tasks into smaller sub-tasks that can be processed simultaneously, it is possible to achieve higher levels of scalability and efficiency. This approach has already shown significant success in the world of High-Performance Computing (HPC) and is likely to become a cornerstone of future AI-driven systems.', 'raw': 'Another promising solution is to leverage the power of distributed computing. By breaking down AI tasks into smaller sub-tasks that can be processed simultaneously, it is possible to achieve higher levels of scalability and efficiency. This approach has already shown significant success in the world of High-Performance Computing (HPC) and is likely to become a cornerstone of future AI-driven systems.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In conclusion, while the scalability issues associated with Edge AI are substantial, they are not insurmountable. With ongoing research and development, it is possible to unlock the true potential of AI-driven systems and create a new era of computing that is faster, more efficient, and more reliable than ever before.', 'raw': 'In conclusion, while the scalability issues associated with Edge AI are substantial, they are not insurmountable. With ongoing research and development, it is possible to unlock the true potential of AI-driven systems and create a new era of computing that is faster, more efficient, and more reliable than ever before.'}]","# Edge AI and scalability issues: The Next Frontier + +In recent years, the advancement of Artificial Intelligence (AI) has been nothing short of extraordinary. With Edge AI, we have brought computing power to the very edge of the network, transforming the way we interact with data. However, one of the biggest challenges that have emerged with the rise of AI is its scalability. + +Edge AI requires powerful processors to handle the complex algorithms that drive AI-driven applications. As these networks grow and more devices join the Edge AI ecosystem, the demand for processing power skyrockets. This puts a considerable strain on the infrastructure and can lead to performance issues, latency, and data loss. + +To address this issue, AI researchers and developers are exploring new ways to optimize their algorithms for scalability. This involves identifying bottlenecks in the system and developing new techniques to maximize the use of available resources. One approach is to use machine learning (ML) algorithms to train models that can adapt to changing conditions and to the available resources. + +Another promising solution is to leverage the power of distributed computing. By breaking down AI tasks into smaller sub-tasks that can be processed simultaneously, it is possible to achieve higher levels of scalability and efficiency. This approach has already shown significant success in the world of High-Performance Computing (HPC) and is likely to become a cornerstone of future AI-driven systems. + +In conclusion, while the scalability issues associated with Edge AI are substantial, they are not insurmountable. 
With ongoing research and development, it is possible to unlock the true potential of AI-driven systems and create a new era of computing that is faster, more efficient, and more reliable than ever before.",[],[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-06-15 21:29:39,2025-06-15 21:29:39.609,[],/posts/ghostai1/697794117804055,299,"{'language': 'en', 'probability': 0.9345763921737671}",0
/avatars/8fe84d0a85444e5464c4ba3143875cf6.svg,41.0,Michael Anthony,MikeDoes,954423414183854,"[{'type': 'text', 'value': 'Started ', 'raw': 'Started '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'aistatuscodes'}, 'url': 'https://huggingface.co/aistatuscodes', 'raw': 'https://huggingface.co/aistatuscodes', 'image': 'https://www.gravatar.com/avatar/6af6572b1fae8b35e40682e770ebfd58?d=retro&size=100'}, {'type': 'text', 'value': ' as a new project to create codes to understand AI performance better.', 'raw': ' as a new project to create codes to understand AI performance better.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Going to be posting daily here and on instagram until we get to 100m downloads :)', 'raw': 'Going to be posting daily here and on instagram until we get to 100m downloads :)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.instagram.com/MikeDoesDo/', 'raw': 'https://www.instagram.com/MikeDoesDo/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow along the journey!', 'raw': 'Follow along the journey!'}]","Started https://huggingface.co/aistatuscodes as a new project to create codes to understand AI performance better. + +Going to be posting daily here and on instagram until we get to 100m downloads :) +https://www.instagram.com/MikeDoesDo/ + +Follow along the journey!",[],[],"[{'reaction': '🚀', 'users': ['MikeDoes', 'John6666'], 'count': 2}, {'reaction': '❤️', 'users': ['MikeDoes', 'asshat23'], 'count': 2}, {'reaction': '🔥', 'users': ['MikeDoes'], 'count': 1}, {'reaction': '👀', 'users': ['MikeDoes'], 'count': 1}]",2025-06-15 18:49:49,2025-06-15 18:50:02.411,[],/posts/MikeDoes/954423414183854,2673,"{'language': 'en', 'probability': 0.8372206687927246}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,659118746872319,"[{'type': 'text', 'value': '11 Types of JEPA', 'raw': '11 Types of JEPA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Since Meta released the newest V-JEPA 2 this week, we thought it's a good time to revisit a few other interesting JEPA variants. JEPA, or Joint Embedding Predictive Architecture, is a self-supervised learning framework that predicts the latent representation of a missing part of the input. "", 'raw': ""Since Meta released the newest V-JEPA 2 this week, we thought it's a good time to revisit a few other interesting JEPA variants. JEPA, or Joint Embedding Predictive Architecture, is a self-supervised learning framework that predicts the latent representation of a missing part of the input. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here are 11 JEPA types that you should know about:', 'raw': 'Here are 11 JEPA types that you should know about:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. 
V-JEPA 2 -> ', 'raw': '1. V-JEPA 2 -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2506.09985'}, 'url': 'https://huggingface.co/papers/2506.09985', 'raw': 'https://huggingface.co/papers/2506.09985', 'label': 'V-JEPA 2: Self-Supervised Video Models Enable Understanding, Prediction\n and Planning (2506.09985)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Trained on 1M+ hours of internet videos and a little bit of robot interaction data, V-JEPA 2 can watch, understand, answer questions, and help robots plan and act in the physical world', 'raw': 'Trained on 1M+ hours of internet videos and a little bit of robot interaction data, V-JEPA 2 can watch, understand, answer questions, and help robots plan and act in the physical world'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Time-Series-JEPA (TS-JEPA) -> ', 'raw': '2. Time-Series-JEPA (TS-JEPA) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2406.04853'}, 'url': 'https://huggingface.co/papers/2406.04853', 'raw': 'https://huggingface.co/papers/2406.04853', 'label': 'Time-Series JEPA for Predictive Remote Control under Capacity-Limited\n Networks (2406.04853)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's a time-series predictive model that learns compact, meaningful representations. A self-supervised semantic actor then uses them to generate control commands without raw data"", 'raw': ""It's a time-series predictive model that learns compact, meaningful representations. A self-supervised semantic actor then uses them to generate control commands without raw data""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Denoising JEPA (D-JEPA) -> ', 'raw': '3. Denoising JEPA (D-JEPA) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2410.03755'}, 'url': 'https://huggingface.co/papers/2410.03755', 'raw': 'https://huggingface.co/papers/2410.03755', 'label': 'Denoising with a Joint-Embedding Predictive Architecture (2410.03755)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Combines JEPA with diffusion techniques. By treating JEPA as masked image modeling and next-token prediction, D-JEPA generates data auto-regressively, incorporating diffusion and flow-matching losses ', 'raw': 'Combines JEPA with diffusion techniques. By treating JEPA as masked image modeling and next-token prediction, D-JEPA generates data auto-regressively, incorporating diffusion and flow-matching losses '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. CNN-JEPA -> ', 'raw': '4. CNN-JEPA -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2408.07514'}, 'url': 'https://huggingface.co/papers/2408.07514', 'raw': 'https://huggingface.co/papers/2408.07514', 'label': 'CNN-JEPA: Self-Supervised Pretraining Convolutional Neural Networks\n Using Joint Embedding Predictive Architecture (2408.07514)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This SSL approach applies the JEPA idea to CNNs using a sparse encoder, depthwise separable convolutions, and improved masking. On ImageNet-100, CNN-JEPA outperforms I-JEPA with 73.3% accuracy', 'raw': 'This SSL approach applies the JEPA idea to CNNs using a sparse encoder, depthwise separable convolutions, and improved masking. 
On ImageNet-100, CNN-JEPA outperforms I-JEPA with 73.3% accuracy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. Stem-JEPA -> ', 'raw': '5. Stem-JEPA -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2408.02514'}, 'url': 'https://huggingface.co/papers/2408.02514', 'raw': 'https://huggingface.co/papers/2408.02514', 'label': 'Stem-JEPA: A Joint-Embedding Predictive Architecture for Musical Stem\n Compatibility Estimation (2408.02514)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Identifies instrument stems by mapping mixes and stems into a shared space using an encoder and predictor. It captures timbre, harmony, and rhythm for tasks like stem retrieval, alignment, and genre or key estimation ', 'raw': 'Identifies instrument stems by mapping mixes and stems into a shared space using an encoder and predictor. It captures timbre, harmony, and rhythm for tasks like stem retrieval, alignment, and genre or key estimation '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6. DMT-JEPA (Discriminative Masked Targets JEPA) -> ', 'raw': '6. DMT-JEPA (Discriminative Masked Targets JEPA) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.17995'}, 'url': 'https://huggingface.co/papers/2405.17995', 'raw': 'https://huggingface.co/papers/2405.17995', 'label': 'DMT-JEPA: Discriminative Masked Targets for Joint-Embedding Predictive\n Architecture (2405.17995)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Improves discriminative power by generating masked targets from semantically similar neighboring patches and uses lightweight cross-attention for aggregation', 'raw': 'Improves discriminative power by generating masked targets from semantically similar neighboring patches and uses lightweight cross-attention for aggregation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read further below👇', 'raw': 'Read further below👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also, subscribe to the Turing Post -> ', 'raw': 'Also, subscribe to the Turing Post -> '}, {'type': 'link', 'href': 'https://www.turingpost.com/subscribe', 'raw': 'https://www.turingpost.com/subscribe'}]","11 Types of JEPA + +Since Meta released the newest V-JEPA 2 this week, we thought it's a good time to revisit a few other interesting JEPA variants. JEPA, or Joint Embedding Predictive Architecture, is a self-supervised learning framework that predicts the latent representation of a missing part of the input. + +Here are 11 JEPA types that you should know about: + +1. V-JEPA 2 -> https://huggingface.co/papers/2506.09985 +Trained on 1M+ hours of internet videos and a little bit of robot interaction data, V-JEPA 2 can watch, understand, answer questions, and help robots plan and act in the physical world + +2. Time-Series-JEPA (TS-JEPA) -> https://huggingface.co/papers/2406.04853 +It's a time-series predictive model that learns compact, meaningful representations. A self-supervised semantic actor then uses them to generate control commands without raw data + +3. Denoising JEPA (D-JEPA) -> https://huggingface.co/papers/2410.03755 +Combines JEPA with diffusion techniques. 
By treating JEPA as masked image modeling and next-token prediction, D-JEPA generates data auto-regressively, incorporating diffusion and flow-matching losses + +4. CNN-JEPA -> https://huggingface.co/papers/2408.07514 +This SSL approach applies the JEPA idea to CNNs using a sparse encoder, depthwise separable convolutions, and improved masking. On ImageNet-100, CNN-JEPA outperforms I-JEPA with 73.3% accuracy + +5. Stem-JEPA -> https://huggingface.co/papers/2408.02514 +Identifies instrument stems by mapping mixes and stems into a shared space using an encoder and predictor. It captures timbre, harmony, and rhythm for tasks like stem retrieval, alignment, and genre or key estimation + +6. DMT-JEPA (Discriminative Masked Targets JEPA) -> https://huggingface.co/papers/2405.17995 +Improves discriminative power by generating masked targets from semantically similar neighboring patches and uses lightweight cross-attention for aggregation + +Read further below👇 + +Also, subscribe to the Turing Post -> https://www.turingpost.com/subscribe",[],[],"[{'reaction': '👍', 'users': ['John6666', '9voltfan2009', 'abdouaziz', 'arxhsxhshshxshxhs', 'Kseniase', 'StevenG640', 'zeynel', 'bniladridas'], 'count': 8}]",2025-06-15 10:56:12,2025-06-15 10:58:13.548,"[{'_id': '64838b28c235ef76b63e4999', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg', 'fullname': 'Ksenia Se', 'name': 'Kseniase', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 971, 'isFollowing': False}]",/posts/Kseniase/659118746872319,3425,"{'language': 'en', 'probability': 0.8248188495635986}",1
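To make the recipe shared by all of these JEPA variants concrete, here is a tiny, illustrative PyTorch sketch of a generic JEPA training step (context encoder, EMA-updated target encoder, and a predictor trained with a latent-space loss). The module sizes and names are hypothetical simplifications, not the implementation of any specific paper above.

```python
import torch
import torch.nn.functional as F
from torch import nn

class TinyJEPA(nn.Module):
    """Toy JEPA: predict the latent of a masked part from the visible context."""
    def __init__(self, dim: int = 256):
        super().__init__()
        make = lambda: nn.Sequential(nn.Linear(dim, dim), nn.GELU(), nn.Linear(dim, dim))
        self.context_encoder = make()
        self.target_encoder = make()
        self.predictor = nn.Linear(dim, dim)
        # The target encoder starts as a copy and is updated by EMA, not by gradients.
        self.target_encoder.load_state_dict(self.context_encoder.state_dict())
        for p in self.target_encoder.parameters():
            p.requires_grad_(False)

    @torch.no_grad()
    def ema_update(self, momentum: float = 0.996):
        for pc, pt in zip(self.context_encoder.parameters(),
                          self.target_encoder.parameters()):
            pt.mul_(momentum).add_((1.0 - momentum) * pc)

    def loss(self, visible: torch.Tensor, masked: torch.Tensor) -> torch.Tensor:
        pred = self.predictor(self.context_encoder(visible))  # predicted latent
        with torch.no_grad():
            target = self.target_encoder(masked)  # no pixel/token reconstruction
        return F.smooth_l1_loss(pred, target)

model = TinyJEPA()
opt = torch.optim.AdamW([p for p in model.parameters() if p.requires_grad], lr=1e-4)
visible, masked = torch.randn(8, 256), torch.randn(8, 256)  # stand-in features
loss = model.loss(visible, masked)
loss.backward()
opt.step(); opt.zero_grad()
model.ema_update()
print(f"latent prediction loss: {loss.item():.4f}")
```

The key design point, common to every variant listed, is that the loss lives in representation space rather than pixel or token space, which is what distinguishes JEPA from reconstruction-based self-supervision.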
https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,718905723783644,"[{'type': 'text', 'value': '🎬 VEO3 Directors - All-in-One AI Video Creation Suite', 'raw': '🎬 VEO3 Directors - All-in-One AI Video Creation Suite'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 What is VEO3 Directors?', 'raw': '🚀 What is VEO3 Directors?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'VEO3 Directors is a revolutionary end-to-end AI video creation platform that transforms your ideas into cinematic reality. From story conception to final video with synchronized audio - all in one seamless workflow!', 'raw': 'VEO3 Directors is a revolutionary end-to-end AI video creation platform that transforms your ideas into cinematic reality. From story conception to final video with synchronized audio - all in one seamless workflow!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Try It Now', 'raw': '🔗 Try It Now'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/VEO3-Directors'}, 'url': 'https://huggingface.co/spaces/ginigen/VEO3-Directors', 'raw': 'https://huggingface.co/spaces/ginigen/VEO3-Directors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/VEO3-Free'}, 'url': 'https://huggingface.co/spaces/ginigen/VEO3-Free', 'raw': 'https://huggingface.co/spaces/ginigen/VEO3-Free'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/VEO3-Free-mirror'}, 'url': 'https://huggingface.co/spaces/ginigen/VEO3-Free-mirror', 'raw': 'https://huggingface.co/spaces/ginigen/VEO3-Free-mirror'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key Features', 'raw': '✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Story Seed Generator', 'raw': '📝 Story Seed Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎲 Instantly generate creative story ideas across multiple genres', 'raw': '🎲 Instantly generate creative story ideas across multiple genres'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌏 Bilingual support (English/Korean)', 'raw': '🌏 Bilingual support (English/Korean)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎭 Rich categories: Genre, Setting, Characters, and more', 'raw': '🎭 Rich categories: Genre, Setting, Characters, and more'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎥 AI Script & Prompt Crafting', 'raw': '🎥 AI Script & Prompt Crafting'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💬 Powered by Friendli API for Hollywood-quality prompts', 'raw': '💬 Powered by Friendli API for Hollywood-quality prompts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 AI Director writes detailed cinematography instructions', 'raw': '🤖 AI Director writes detailed cinematography instructions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎬 Professional elements: camera movements, lighting, VFX', 'raw': '🎬 Professional elements: camera movements, lighting, VFX'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎬 Video + Audio Generation', 'raw': '🎬 Video + Audio Generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Wan2.1-T2V-14B for stunning visual quality', 'raw': '🎨 Wan2.1-T2V-14B for stunning visual quality'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡ NAG 4-step inference - 10x faster generation', 'raw': '⚡ NAG 4-step inference - 10x faster generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎵 MMAudio auto-generates matching soundscapes', 'raw': '🎵 MMAudio auto-generates matching soundscapes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎛️ Full control over resolution, duration, and style', 'raw': '🎛️ Full control over resolution, duration, and style'}, 
{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💬LLM(API): VIDraft/Gemma-3-R1984-27B', 'raw': '💬LLM(API): VIDraft/Gemma-3-R1984-27B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 How It Works', 'raw': '💡 How It Works'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Generate Story → ""The Time Traveler\'s Final Choice"" 🕰️', 'raw': 'Generate Story → ""The Time Traveler\'s Final Choice"" 🕰️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Create Script → AI writes cinematic scene descriptions 📜', 'raw': 'Create Script → AI writes cinematic scene descriptions 📜'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Produce Video → 4-8 second clip with synchronized audio 🎞️', 'raw': 'Produce Video → 4-8 second clip with synchronized audio 🎞️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 What Makes It Special', 'raw': '🎯 What Makes It Special'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Unified Workflow: From idea to video in one interface', 'raw': 'Unified Workflow: From idea to video in one interface'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Director-Level Prompts: Professional cinematography language', 'raw': 'Director-Level Prompts: Professional cinematography language'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lightning Fast: Minutes, not hours', 'raw': 'Lightning Fast: Minutes, not hours'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Smart Audio: Context-aware sound generation', 'raw': 'Smart Audio: Context-aware sound generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏆 Use Cases', 'raw': '🏆 Use Cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📱 Social Media Content', 'raw': '📱 Social Media Content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎓 Educational Videos', 'raw': '🎓 Educational Videos'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📺 Marketing & Ads', 'raw': '📺 Marketing & Ads'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎮 Game Cutscene Prototyping', 'raw': '🎮 Game Cutscene Prototyping'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Digital Art Creation ', 'raw': '🎨 Digital Art Creation '}]","🎬 VEO3 Directors - All-in-One AI Video Creation Suite + +🚀 What is VEO3 Directors? +VEO3 Directors is a revolutionary end-to-end AI video creation platform that transforms your ideas into cinematic reality. From story conception to final video with synchronized audio - all in one seamless workflow! 
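If you would rather drive the Space from a script than from the web UI, a rough sketch with gradio_client is below; the endpoint name and argument list are hypothetical placeholders, so check the Space's "Use via API" panel for the real signature.

```python
# Hypothetical sketch: calling the Space programmatically with gradio_client.
# "/generate_video" and the single prompt argument are placeholders, not the
# Space's documented API -- consult its "Use via API" panel first.
from gradio_client import Client

client = Client("ginigen/VEO3-Directors")
result = client.predict(
    "A time traveler hesitates at a glowing portal, cinematic motion, smooth animation",
    api_name="/generate_video",  # placeholder endpoint name
)
print(result)  # typically a local file path to the generated clip
```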
+ +🔗 Try It Now +https://huggingface.co/spaces/ginigen/VEO3-Directors +https://huggingface.co/spaces/ginigen/VEO3-Free +https://huggingface.co/spaces/ginigen/VEO3-Free-mirror + +✨ Key Features +📝 Story Seed Generator + +🎲 Instantly generate creative story ideas across multiple genres +🌏 Bilingual support (English/Korean) +🎭 Rich categories: Genre, Setting, Characters, and more + +🎥 AI Script & Prompt Crafting + +💬 Powered by Friendli API for Hollywood-quality prompts +🤖 AI Director writes detailed cinematography instructions +🎬 Professional elements: camera movements, lighting, VFX + +🎬 Video + Audio Generation + +🎨 Wan2.1-T2V-14B for stunning visual quality +⚡ NAG 4-step inference - 10x faster generation +🎵 MMAudio auto-generates matching soundscapes +🎛️ Full control over resolution, duration, and style +💬LLM(API): VIDraft/Gemma-3-R1984-27B + +💡 How It Works + +Generate Story → ""The Time Traveler's Final Choice"" 🕰️ +Create Script → AI writes cinematic scene descriptions 📜 +Produce Video → 4-8 second clip with synchronized audio 🎞️ + +🎯 What Makes It Special + +Unified Workflow: From idea to video in one interface +Director-Level Prompts: Professional cinematography language +Lightning Fast: Minutes, not hours +Smart Audio: Context-aware sound generation + +🏆 Use Cases + +📱 Social Media Content +🎓 Educational Videos +📺 Marketing & Ads +🎮 Game Cutscene Prototyping +🎨 Digital Art Creation ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/d4N3vyLMWd1KVkoTuBfxb.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/YFnNqjoJT9wEojTTDFx8a.png'}]",[],"[{'reaction': '🔥', 'users': ['francisgroup', 'immunobiotech', 'Jcatt29', 'viidfedenwad', 'Sally1111', 'John6666', '9voltfan2009', 'seawolf2357', 'J4BEZ', 'fantos', 'psaience', 'ianyeung', 'katonfjrr', 'aiqtech'], 'count': 14}, {'reaction': '🚀', 'users': ['ginipick', 'Jcatt29', '9voltfan2009', 'seawolf2357', 'francisgroup', 'J4BEZ', 'katonfjrr'], 'count': 7}, {'reaction': '👍', 'users': ['ginipick', 'Jcatt29', '9voltfan2009', 'seawolf2357', 'J4BEZ', 'katonfjrr'], 'count': 6}, {'reaction': '🤗', 'users': ['immunobiotech', 'Jcatt29', '9voltfan2009', 'seawolf2357', 'J4BEZ', 'katonfjrr'], 'count': 6}]",2025-06-15 06:02:17,2025-06-15 06:02:17.001,[],/posts/ginipick/718905723783644,3242,"{'language': 'en', 'probability': 0.7384775280952454}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png,30.0,GHOSTAI,ghostai1,416547085254446,"[{'type': 'text', 'value': '# Unraveling Natural Language Processing advancements', 'raw': '# Unraveling Natural Language Processing advancements'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The era of Artificial Intelligence (AI) has truly revolutionized the way we communicate and interact with technology. One of the most significant advancements in AI technology is Natural Language Processing (NLP) which is precisely designed to understand human language.', 'raw': 'The era of Artificial Intelligence (AI) has truly revolutionized the way we communicate and interact with technology. 
One of the most significant advancements in AI technology is Natural Language Processing (NLP), which is precisely designed to understand human language.', 'raw': 'The era of Artificial Intelligence (AI) has truly revolutionized the way we communicate and interact with technology. One of the most significant advancements in AI technology is Natural Language Processing (NLP), which is precisely designed to understand human language.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI-driven NLP has made significant strides in recent years, enabling more accurate translation, sentiment analysis, and speech recognition, among others. These advancements have opened up a whole new world of possibilities, from chatbots to voice assistants like Siri, Alexa, and Google Assistant.', 'raw': 'AI-driven NLP has made significant strides in recent years, enabling more accurate translation, sentiment analysis, and speech recognition, among others. These advancements have opened up a whole new world of possibilities, from chatbots to voice assistants like Siri, Alexa, and Google Assistant.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Moreover, NLP is finding its way into various industries, from customer service to healthcare. For instance, in healthcare, NLP can analyze medical records and pre-populate patient information, thereby saving valuable time for doctors. In customer service, chatbots using NLP can handle customer queries round the clock, providing instant assistance to clients.', 'raw': 'Moreover, NLP is finding its way into various industries, from customer service to healthcare. For instance, in healthcare, NLP can analyze medical records and pre-populate patient information, thereby saving valuable time for doctors. In customer service, chatbots using NLP can handle customer queries round the clock, providing instant assistance to clients.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The future of NLP and AI-driven technologies looks incredibly promising, with continuous research and development. These technologies will not only make communication seamless but also play a pivotal role in shaping our daily lives by making them more efficient and user-friendly. ', 'raw': 'The future of NLP and AI-driven technologies looks incredibly promising, with continuous research and development. These technologies will not only make communication seamless but also play a pivotal role in shaping our daily lives by making them more efficient and user-friendly. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Randy: Hey buddy, have you tried the Tegridy Pancakes yet? They’re the bomb.com. Made with Tegridy Weed, of course. Just flip 'em and serve 'em hot. The kids love 'em. They say they taste just like regular pancakes, but with a little extra kick."", 'raw': ""Randy: Hey buddy, have you tried the Tegridy Pancakes yet? They’re the bomb.com. Made with Tegridy Weed, of course. Just flip 'em and serve 'em hot. The kids love 'em. They say they taste just like regular pancakes, but with a little extra kick.""}]","# Unraveling Natural Language Processing advancements + +The era of Artificial Intelligence (AI) has truly revolutionized the way we communicate and interact with technology. One of the most significant advancements in AI technology is Natural Language Processing (NLP), which is precisely designed to understand human language. + +AI-driven NLP has made significant strides in recent years, enabling more accurate translation, sentiment analysis, and speech recognition, among others. 
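For instance, trying one of these capabilities takes only a few lines; here is a minimal sketch with the Hugging Face transformers pipeline (the default checkpoint is whatever the library selects, not a specific model endorsed here):

```python
from transformers import pipeline

# Sentiment analysis with the library's default English model.
classifier = pipeline("sentiment-analysis")
print(classifier("NLP assistants have become remarkably useful."))
# e.g. [{'label': 'POSITIVE', 'score': 0.99...}]
```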
These advancements have opened up a whole new world of possibilities, from chatbots to voice assistants like Siri, Alexa, and Google Assistant. + +Moreover, NLP is finding its way into various industries, from customer service to healthcare. For instance, in healthcare, NLP can analyze medical records and pre-populate patient information, thereby saving valuable time for doctors. In customer service, chatbots using NLP can handle customer queries round the clock, providing instant assistance to clients. + +The future of NLP and AI-driven technologies looks incredibly promising, with continuous research and development. These technologies will not only make communication seamless but also play a pivotal role in shaping our daily lives by making them more efficient and user-friendly. + +Randy: Hey buddy, have you tried the Tegridy Pancakes yet? They’re the bomb.com. Made with Tegridy Weed, of course. Just flip 'em and serve 'em hot. The kids love 'em. They say they taste just like regular pancakes, but with a little extra kick.",[],[],"[{'reaction': '🔥', 'users': ['Jcatt29', 'John6666'], 'count': 2}]",2025-06-14 20:28:01,2025-06-14 20:28:01.598,[],/posts/ghostai1/416547085254446,1570,"{'language': 'en', 'probability': 0.9309371113777161}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,453570470195139,"[{'type': 'text', 'value': 'Beginner’s Guide — Generate Videos With SwarmUI', 'raw': 'Beginner’s Guide — Generate Videos With SwarmUI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full article here, please check it out: ', 'raw': 'Full article here, please check it out: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/MonsterMMORPG/beginners-guide-generate-videos-with-swarmui', 'raw': 'https://huggingface.co/blog/MonsterMMORPG/beginners-guide-generate-videos-with-swarmui'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Proper ComfyUI backend then SwarmUI installation tutorial : ', 'raw': 'Proper ComfyUI backend then SwarmUI installation tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/fTzlQ0tjxj0', 'raw': 'https://youtu.be/fTzlQ0tjxj0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Proper ComfyUI backend then SwarmUI installation tutorial on RunPod : ', 'raw': 'Proper ComfyUI backend then SwarmUI installation tutorial on RunPod : '}, {'type': 'link', 'href': 'https://youtu.be/R02kPf9Y3_w', 'raw': 'https://youtu.be/R02kPf9Y3_w'}]","Beginner’s Guide — Generate Videos With SwarmUI + +Full article here, please check it out: https://huggingface.co/blog/MonsterMMORPG/beginners-guide-generate-videos-with-swarmui + +Proper ComfyUI backend then SwarmUI installation tutorial : https://youtu.be/fTzlQ0tjxj0 + +Proper ComfyUI backend then SwarmUI installation tutorial on RunPod : https://youtu.be/R02kPf9Y3_w",[],[],"[{'reaction': '❤️', 'users': ['MonsterMMORPG', 'Sally1111', 'RGethj2', 'Khdoud', 'deepakanand14', 'vericudebuget'], 'count': 6}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'vericudebuget'], 'count': 2}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666'], 'count': 2}, {'reaction': '🔥', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': 
['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-06-14 18:42:31,2025-06-21 11:20:04.955,"[{'_id': '67b8e0d79107c46e941a3fc8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/kjcJuaIcvP-1x4Hmf6LMt.png', 'fullname': 'Karen Akers', 'name': 'karenny', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '6345bd89fe134dfd7a0dba40', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg', 'fullname': 'Furkan Gözükara', 'name': 'MonsterMMORPG', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 646, 'isFollowing': False}]",/posts/MonsterMMORPG/453570470195139,2328,"{'language': 'en', 'probability': 0.3443537950515747}",2
https://cdn-avatars.huggingface.co/v1/production/uploads/6615494716917dfdc645c44e/-qgGeUlEsf5z-jilWawyO.png,17.0,Daniel Fox,FlameF0X,196232708808828,"[{'type': 'text', 'value': 'I released a small documentation project on how to make your own LM architecture called [LM-From-Scratch](', 'raw': 'I released a small documentation project on how to make your own LM architecture called [LM-From-Scratch]('}, {'type': 'link', 'href': 'https://github.com/FlameF0X/LM-From-Scratch', 'raw': 'https://github.com/FlameF0X/LM-From-Scratch'}, {'type': 'text', 'value': ')', 'raw': ')'}]",I released a small documentation project on how to make your own LM architecture called [LM-From-Scratch](https://github.com/FlameF0X/LM-From-Scratch),[],[],"[{'reaction': '👍', 'users': ['nqzfaizal77ai', 'John6666', '9voltfan2009', 'deepakanand14'], 'count': 4}, {'reaction': '🔥', 'users': ['aquiffoo', '9voltfan2009'], 'count': 2}]",2025-06-14 15:47:23,2025-06-14 15:47:23.623,[],/posts/FlameF0X/196232708808828,1970,"{'language': 'en', 'probability': 0.7629652619361877}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/mrB2AFA3K9g3gJz-WxxjH.jpeg,2.0,kanaria007,kanaria007,416095510531351,"[{'type': 'text', 'value': '✅ New Article on Hugging Face: Structural Reading Before Reasoning', 'raw': '✅ New Article on Hugging Face: Structural Reading Before Reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Title:', 'raw': 'Title:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📘 Understanding the Problem Readiness Protocol: Structured Problem Analysis Before Solution Attempts', 'raw': '📘 Understanding the Problem Readiness Protocol: Structured Problem Analysis Before Solution Attempts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Read the article here: ', 'raw': '🔗 Read the article here: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/kanaria007/understanding-the-problem-readiness-protocol', 'raw': 'https://huggingface.co/blog/kanaria007/understanding-the-problem-readiness-protocol'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Summary:', 'raw': 'Summary:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This article introduces a structured pre-solution framework that teaches language models to “read” problems before attempting to solve them.', 'raw': 'This article introduces a 
structured pre-solution framework that teaches language models to “read” problems before attempting to solve them.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Rather than jumping to answers, the Problem Readiness Protocol trains models to:', 'raw': 'Rather than jumping to answers, the Problem Readiness Protocol trains models to:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tIdentify multi-layered problem structures', 'raw': '\t•\tIdentify multi-layered problem structures'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tSelect the most appropriate reasoning jump types', 'raw': '\t•\tSelect the most appropriate reasoning jump types'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tPredict cognitive traps in advance', 'raw': '\t•\tPredict cognitive traps in advance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tDeclare framing strategies before entering solution mode', 'raw': '\t•\tDeclare framing strategies before entering solution mode'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This method enhances reasoning traceability, improves alignment with structural constraints, and offers a reusable framework for platform-agnostic problem analysis — applicable across GPT-4o, Claude, and Gemini.', 'raw': 'This method enhances reasoning traceability, improves alignment with structural constraints, and offers a reusable framework for platform-agnostic problem analysis — applicable across GPT-4o, Claude, and Gemini.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is not a rigid checklist. It’s an intelligence scaffolding strategy.', 'raw': 'This is not a rigid checklist. 
It’s an intelligence scaffolding strategy.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Resources:', 'raw': 'Resources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🧠 Protocol Dataset: ', 'raw': '• 🧠 Protocol Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'kanaria007/agi-structural-intelligence-protocols'}, 'url': 'https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols', 'raw': 'https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🔍 Included: Trap forecast examples, jump-type declaration schema, and reasoning frame logs', 'raw': '• 🔍 Included: Trap forecast examples, jump-type declaration schema, and reasoning frame logs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Relevant for practitioners interested in:', 'raw': 'Relevant for practitioners interested in:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Problem representation theory', 'raw': '• Problem representation theory'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Structural thinking in LLMs', 'raw': '• Structural thinking in LLMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Meta-cognitive reasoning design', 'raw': '• Meta-cognitive reasoning design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Educational scaffolding for alignment', 'raw': '• Educational scaffolding for alignment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Robust prompt-based reasoning frameworks', 'raw': '• Robust prompt-based reasoning frameworks'}]","✅ New Article on Hugging Face: Structural Reading Before Reasoning + +Title: +📘 Understanding the Problem Readiness Protocol: Structured Problem Analysis Before Solution Attempts +🔗 Read the article here: https://huggingface.co/blog/kanaria007/understanding-the-problem-readiness-protocol + +Summary: +This article introduces a structured pre-solution framework that teaches language models to “read” problems before attempting to solve them. + +Rather than jumping to answers, the Problem Readiness Protocol trains models to: + • Identify multi-layered problem structures + • Select the most appropriate reasoning jump types + • Predict cognitive traps in advance + • Declare framing strategies before entering solution mode + +This method enhances reasoning traceability, improves alignment with structural constraints, and offers a reusable framework for platform-agnostic problem analysis — applicable across GPT-4o, Claude, and Gemini. + +This is not a rigid checklist. It’s an intelligence scaffolding strategy. 
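As a rough illustration (my own hypothetical rendering, not the dataset's actual schema), such a pre-solution declaration could be represented like this:

```python
from dataclasses import dataclass, field

@dataclass
class ProblemReadiness:
    """Hypothetical pre-solution declaration a model emits before answering."""
    problem_layers: list[str] = field(default_factory=list)   # identified structure
    jump_type: str = "deductive"                              # chosen reasoning jump
    predicted_traps: list[str] = field(default_factory=list)  # cognitive traps to avoid
    framing_strategy: str = ""                                # declared before solving

readiness = ProblemReadiness(
    problem_layers=["surface ask", "hidden constraint", "evaluation criterion"],
    jump_type="abductive",
    predicted_traps=["premature optimization", "anchoring on the first framing"],
    framing_strategy="decompose, then verify each layer against constraints",
)
print(readiness)
```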
+ +Resources: +• 🧠 Protocol Dataset: https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols + +• 🔍 Included: Trap forecast examples, jump-type declaration schema, and reasoning frame logs + +Relevant for practitioners interested in: +• Problem representation theory +• Structural thinking in LLMs +• Meta-cognitive reasoning design +• Educational scaffolding for alignment +• Robust prompt-based reasoning frameworks",[],[],"[{'reaction': '👀', 'users': ['John6666', '9voltfan2009'], 'count': 2}]",2025-06-14 14:04:54,2025-06-14 14:06:18.941,[],/posts/kanaria007/416095510531351,1335,"{'language': 'en', 'probability': 0.7894636988639832}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,740369227920658,"[{'type': 'text', 'value': ""You can now edit operations with a discrete flow model, supercool👍! It's amazing to see the progress on DFM within one year since its introduction - literally my litmus test for how fast the field is progressing:"", 'raw': ""You can now edit operations with a discrete flow model, supercool👍! It's amazing to see the progress on DFM within one year since its introduction - literally my litmus test for how fast the field is progressing:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1st Introduced (2024): ', 'raw': '1st Introduced (2024): '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2402.04997', 'raw': 'https://arxiv.org/abs/2402.04997'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Discrete Flow Matching (2024): ', 'raw': 'Discrete Flow Matching (2024): '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2407.15595', 'raw': 'https://arxiv.org/abs/2407.15595'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edit Discrete Flow (2025): ', 'raw': 'Edit Discrete Flow (2025): '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2506.09018', 'raw': 'https://arxiv.org/pdf/2506.09018'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Looking forward to a SaaS level reach like that of dLLMs e.g Mercury by inception labs 🚀', 'raw': 'Looking forward to a SaaS level reach like that of dLLMs e.g Mercury by inception labs 🚀'}]","You can now edit operations with a discrete flow model, supercool👍! 
It's amazing to see the progress on DFM within one year since its introduction - literally my litmus test for how fast the field is progressing: +1st Introduced (2024): https://arxiv.org/abs/2402.04997 +Discrete Flow Matching (2024): https://arxiv.org/abs/2407.15595 +Edit Discrete Flow (2025): https://arxiv.org/pdf/2506.09018 +Looking forward to a SaaS level reach like that of dLLMs e.g Mercury by inception labs 🚀","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/jiydZHxfLtXVd43SiAIsJ.qt'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/QyuUqJFI9K7bFHP37JTGm.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/dnQQ47CGigPF1cXrhcfPW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/O46ptaEH4z0Ddr0pM6bux.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', '9voltfan2009', 'nekomeowww'], 'count': 3}]",2025-06-14 13:26:00,2025-06-14 14:23:18.444,"[{'_id': '6438a9027de34e8ea7e4b257', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg', 'fullname': 'Jaward Sesay', 'name': 'Jaward', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 331, 'isFollowing': False}]",/posts/Jaward/740369227920658,1383,"{'language': 'en', 'probability': 0.8359878659248352}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/H5ncB4vaBtP8GVCidgxL0.png,242.0,seawolf,seawolf2357,480409853177984,"[{'type': 'text', 'value': '⚡ FusionX Enhanced Wan 2.1 I2V (14B) 🎬', 'raw': '⚡ FusionX Enhanced Wan 2.1 I2V (14B) 🎬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Revolutionary Image-to-Video Generation Model', 'raw': '🚀 Revolutionary Image-to-Video Generation Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Generate cinematic-quality videos in just 8 steps!', 'raw': 'Generate cinematic-quality videos in just 8 steps!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Heartsync/WAN2-1-fast-T2V-FusioniX'}, 'url': 'https://huggingface.co/spaces/Heartsync/WAN2-1-fast-T2V-FusioniX', 'raw': 'https://huggingface.co/spaces/Heartsync/WAN2-1-fast-T2V-FusioniX'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key Features', 'raw': '✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Ultra-Fast Generation: Premium quality in just 8-10 steps', 'raw': '🎯 Ultra-Fast Generation: Premium quality in just 8-10 steps'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎬 Cinematic Quality: Smooth motion with detailed textures', 'raw': '🎬 Cinematic Quality: Smooth motion with detailed textures'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥 FusionX Technology: Enhanced with CausVid + MPS Rewards LoRA', 'raw': '🔥 FusionX Technology: Enhanced with CausVid + MPS Rewards LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📐 Optimized Resolution: 576×1024 default settings', 'raw': '📐 Optimized Resolution: 576×1024 default settings'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡ 50% Speed Boost: Faster rendering compared to base models', 'raw': '⚡ 50% 
Speed Boost: Faster rendering compared to base models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ Technical Stack', 'raw': '🛠️ Technical Stack'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Base Model: Wan2.1 I2V 14B', 'raw': 'Base Model: Wan2.1 I2V 14B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enhancement Technologies:', 'raw': 'Enhancement Technologies:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 CausVid LoRA (1.0 strength) - Motion modeling', 'raw': '🔗 CausVid LoRA (1.0 strength) - Motion modeling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 MPS Rewards LoRA (0.7 strength) - Detail optimization', 'raw': '🔗 MPS Rewards LoRA (0.7 strength) - Detail optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Scheduler: UniPC Multistep (flow_shift=8.0)', 'raw': 'Scheduler: UniPC Multistep (flow_shift=8.0)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Auto Prompt Enhancement: Automatic cinematic keyword injection', 'raw': 'Auto Prompt Enhancement: Automatic cinematic keyword injection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 How to Use', 'raw': '🎨 How to Use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Upload Image - Select your starting image', 'raw': 'Upload Image - Select your starting image'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enter Prompt - Describe desired motion and style', 'raw': 'Enter Prompt - Describe desired motion and style'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Adjust Settings - 8 steps, 2-5 seconds recommended', 'raw': 'Adjust Settings - 8 steps, 2-5 seconds recommended'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Generate - Complete in just minutes!', 'raw': 'Generate - Complete in just minutes!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 Optimization Tips', 'raw': '💡 Optimization Tips'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Recommended Settings: 8-10 steps, 576×1024 resolution', 'raw': '✅ Recommended Settings: 8-10 steps, 576×1024 resolution'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Prompting: Use ""cinematic motion, smooth animation"" keywords', 'raw': '✅ Prompting: Use ""cinematic motion, smooth animation"" keywords'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Duration: 2-5 seconds for optimal quality', 'raw': '✅ Duration: 2-5 seconds for optimal quality'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Motion: Emphasize natural movement and camera work', 'raw': '✅ Motion: Emphasize natural movement and camera work'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏆 FusionX Enhanced vs Standard Models', 'raw': '🏆 FusionX Enhanced vs Standard Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Performance Comparison: While standard models typically require 15-20 inference steps to achieve decent quality, our FusionX Enhanced version delivers premium results in just 8-10 steps - that's more than 50% faster! The rendering speed has been dramatically improved through optimized LoRA fusion, allowing creators to iterate quickly without sacrificing quality. 
Motion quality has been significantly enhanced with advanced causal modeling, producing smoother, more realistic animations compared to base implementations. Detail preservation is substantially better thanks to MPS Rewards training, maintaining crisp textures and consistent temporal coherence throughout the generated sequences."", 'raw': ""Performance Comparison: While standard models typically require 15-20 inference steps to achieve decent quality, our FusionX Enhanced version delivers premium results in just 8-10 steps - that's more than 50% faster! The rendering speed has been dramatically improved through optimized LoRA fusion, allowing creators to iterate quickly without sacrificing quality. Motion quality has been significantly enhanced with advanced causal modeling, producing smoother, more realistic animations compared to base implementations. Detail preservation is substantially better thanks to MPS Rewards training, maintaining crisp textures and consistent temporal coherence throughout the generated sequences.""}, {'type': 'new_line', 'raw': '\n'}]","⚡ FusionX Enhanced Wan 2.1 I2V (14B) 🎬 + +🚀 Revolutionary Image-to-Video Generation Model +Generate cinematic-quality videos in just 8 steps! + +https://huggingface.co/spaces/Heartsync/WAN2-1-fast-T2V-FusioniX + +✨ Key Features +🎯 Ultra-Fast Generation: Premium quality in just 8-10 steps +🎬 Cinematic Quality: Smooth motion with detailed textures +🔥 FusionX Technology: Enhanced with CausVid + MPS Rewards LoRA +📐 Optimized Resolution: 576×1024 default settings +⚡ 50% Speed Boost: Faster rendering compared to base models +🛠️ Technical Stack + +Base Model: Wan2.1 I2V 14B +Enhancement Technologies: + +🔗 CausVid LoRA (1.0 strength) - Motion modeling +🔗 MPS Rewards LoRA (0.7 strength) - Detail optimization + +Scheduler: UniPC Multistep (flow_shift=8.0) +Auto Prompt Enhancement: Automatic cinematic keyword injection + +🎨 How to Use + +Upload Image - Select your starting image +Enter Prompt - Describe desired motion and style +Adjust Settings - 8 steps, 2-5 seconds recommended +Generate - Complete in just minutes! + +💡 Optimization Tips +✅ Recommended Settings: 8-10 steps, 576×1024 resolution +✅ Prompting: Use ""cinematic motion, smooth animation"" keywords +✅ Duration: 2-5 seconds for optimal quality +✅ Motion: Emphasize natural movement and camera work +🏆 FusionX Enhanced vs Standard Models +Performance Comparison: While standard models typically require 15-20 inference steps to achieve decent quality, our FusionX Enhanced version delivers premium results in just 8-10 steps - that's more than 50% faster! The rendering speed has been dramatically improved through optimized LoRA fusion, allowing creators to iterate quickly without sacrificing quality. Motion quality has been significantly enhanced with advanced causal modeling, producing smoother, more realistic animations compared to base implementations. Detail preservation is substantially better thanks to MPS Rewards training, maintaining crisp textures and consistent temporal coherence throughout the generated sequences. 
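For readers who want to reproduce a setup like the one described in the post above, here is a rough diffusers sketch. The Wan2.1 checkpoint id is the public diffusers port, but the two LoRA repo ids are placeholders (the post does not name them), so treat this as a sketch under those assumptions rather than the Space's actual code:

```python
# Rough sketch only: the LoRA repo ids below are placeholders, not the actual
# repos used by the Space; swap in whichever CausVid / MPS LoRAs you have.
import torch
from diffusers import WanImageToVideoPipeline, UniPCMultistepScheduler
from diffusers.utils import load_image

pipe = WanImageToVideoPipeline.from_pretrained(
    "Wan-AI/Wan2.1-I2V-14B-480P-Diffusers", torch_dtype=torch.bfloat16
).to("cuda")
# UniPC Multistep scheduler with the flow_shift=8.0 value from the post.
pipe.scheduler = UniPCMultistepScheduler.from_config(
    pipe.scheduler.config, flow_shift=8.0
)
# Fuse the two enhancement LoRAs at the stated strengths (1.0 and 0.7).
pipe.load_lora_weights("your-org/causvid-lora", adapter_name="causvid")
pipe.load_lora_weights("your-org/mps-rewards-lora", adapter_name="mps")
pipe.set_adapters(["causvid", "mps"], adapter_weights=[1.0, 0.7])

frames = pipe(
    image=load_image("start_frame.png"),
    prompt="cinematic motion, smooth animation",
    height=1024, width=576,          # 576x1024 default from the post
    num_inference_steps=8,           # 8-10 steps recommended
).frames[0]
```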
+
","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c3550d8cc87cf0c06838e7/b-aFkX6_NUAwI81blI9kr.png'}]",[],"[{'reaction': '🔥', 'users': ['seawolf2357', 'FluxAGI', 'fantos', 'ausntmarzi', 'John6666', 'viidfedenwad', 'akahana', 'EmetTheGolum', 'ginipick', '9voltfan2009', 'shlongzilla', 'francisgroup', 'evangenio', 'aiqtech', 'julianjuaner'], 'count': 15}, {'reaction': '👍', 'users': ['aiqcamp', 'fantaxy', '9voltfan2009', 'seawolf2357', 'ginipick'], 'count': 5}]",2025-06-14 06:28:39,2025-06-17 06:56:14.385,"[{'_id': '62d1218f11c2dbcf27e7f6cb', 'avatarUrl': '/avatars/9f246b034c0b4e088c890c1948b2d838.svg', 'fullname': 'Aitor', 'name': 'aitorserra', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/seawolf2357/480409853177984,4475,"{'language': 'en', 'probability': 0.7840110659599304}",1
+https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,478306591921253,"[{'type': 'text', 'value': 'Ultimate ComfyUI & SwarmUI on RunPod Tutorial with Addition RTX 5000 Series GPUs & 1-Click to Setup : ', 'raw': 'Ultimate ComfyUI & SwarmUI on RunPod Tutorial with Addition RTX 5000 Series GPUs & 1-Click to Setup : '}, {'type': 'link', 'href': 'https://youtu.be/R02kPf9Y3_w', 'raw': 'https://youtu.be/R02kPf9Y3_w'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tutorial Video : ', 'raw': 'Tutorial Video : '}, {'type': 'link', 'href': 'https://youtu.be/R02kPf9Y3_w', 'raw': 'https://youtu.be/R02kPf9Y3_w'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you want to use ComfyUI or SwarmUI with ComfyUI backend on RunPod cloud platform, this is the ultimate tutorial that you will find to install ComfyUI and SwarmUI on RunPod step by step and use each one of them. RunPod is a great platform to scale your AI generation or, if you are GPU poor, rent the very best GPUs and leverage AI in your profession. ComfyUI is the ultimate ecosystem right now for Image and Video generation models, and with the SwarmUI interface leveraging ComfyUI, you can become a master of gen AI. So learn how to install ComfyUI on RunPod step by step and run it. Then learn how to install SwarmUI on RunPod step by step and learn how to use it. Then learn how to give the installed ComfyUI backend to SwarmUI and leverage its features and ultimate performance and optimizations. Moreover, the installers I made install Torch 2.7, CUDA 12.8, xFormers, Sage Attention, Flash Attention, Accelerate, Triton, DeepSpeed, ComfyUI manager and more.', 'raw': 'If you want to use ComfyUI or SwarmUI with ComfyUI backend on RunPod cloud platform, this is the ultimate tutorial that you will find to install ComfyUI and SwarmUI on RunPod step by step and use each one of them. RunPod is a great platform to scale your AI generation or, if you are GPU poor, rent the very best GPUs and leverage AI in your profession. ComfyUI is the ultimate ecosystem right now for Image and Video generation models, and with the SwarmUI interface leveraging ComfyUI, you can become a master of gen AI. So learn how to install ComfyUI on RunPod step by step and run it. Then learn how to install SwarmUI on RunPod step by step and learn how to use it. Then learn how to give the installed ComfyUI backend to SwarmUI and leverage its features and ultimate performance and optimizations. 
Moreover, the installers I made install Torch 2.7, CUDA 12.8, xFormers, Sage Attention, Flash Attention, Accelerate, Triton, DeepSpeed, ComfyUI manager and more.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗ComfyUI Installer Zip File Download ⤵️', 'raw': '🔗ComfyUI Installer Zip File Download ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/Advanced-ComfyUI-1-Click-Installer-105023709', 'raw': 'https://www.patreon.com/posts/Advanced-ComfyUI-1-Click-Installer-105023709'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗SwarmUI Installer and Model Downloader Zip File Download ⤵️', 'raw': '🔗SwarmUI Installer and Model Downloader Zip File Download ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/SwarmUI-Installer-AI-Videos-Downloader-114517862', 'raw': 'https://www.patreon.com/posts/SwarmUI-Installer-AI-Videos-Downloader-114517862'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ Download & Upload Models Tutorial (wget) : ', 'raw': '▶️ Download & Upload Models Tutorial (wget) : '}, {'type': 'link', 'href': 'https://youtu.be/X5WVZ0NMaTg', 'raw': 'https://youtu.be/X5WVZ0NMaTg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ CausVid LoRA V2 Tutorial : ', 'raw': '▶️ CausVid LoRA V2 Tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/1rAwZv0hEcU', 'raw': 'https://youtu.be/1rAwZv0hEcU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ CausVid Main Tutorial : ', 'raw': '▶️ CausVid Main Tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/fTzlQ0tjxj0', 'raw': 'https://youtu.be/fTzlQ0tjxj0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ SwarmUI Master Tutorial : ', 'raw': '▶️ SwarmUI Master Tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/HKX8_F1Er_w', 'raw': 'https://youtu.be/HKX8_F1Er_w'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 SECourses Official Discord 10500+ Members ⤵️', 'raw': '🔗 SECourses Official Discord 10500+ Members ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388', 'raw': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️', 'raw': '🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://github.com/FurkanGozukara/Stable-Diffusion', 'raw': 'https://github.com/FurkanGozukara/Stable-Diffusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Ultimate ComfyUI & SwarmUI on RunPod Tutorial with Addition RTX 5000 Series GPUs & 1-Click to Setup : https://youtu.be/R02kPf9Y3_w
+
+Tutorial Video : https://youtu.be/R02kPf9Y3_w
+
+If you want to use ComfyUI or SwarmUI with ComfyUI backend on RunPod cloud 
platform, this is the ultimate tutorial that you will find to install ComfyUI and SwarmUI on RunPod step by step and use each one of them. RunPod is a great platform to scale your AI generation or, if you are GPU poor, rent the very best GPUs and leverage AI in your profession. ComfyUI is the ultimate ecosystem right now for Image and Video generation models, and with the SwarmUI interface leveraging ComfyUI, you can become a master of gen AI. So learn how to install ComfyUI on RunPod step by step and run it. Then learn how to install SwarmUI on RunPod step by step and learn how to use it. Then learn how to give the installed ComfyUI backend to SwarmUI and leverage its features and ultimate performance and optimizations. Moreover, the installers I made install Torch 2.7, CUDA 12.8, xFormers, Sage Attention, Flash Attention, Accelerate, Triton, DeepSpeed, ComfyUI manager and more.
+
+🔗ComfyUI Installer Zip File Download ⤵️
+▶️ https://www.patreon.com/posts/Advanced-ComfyUI-1-Click-Installer-105023709
+
+🔗SwarmUI Installer and Model Downloader Zip File Download ⤵️
+▶️ https://www.patreon.com/posts/SwarmUI-Installer-AI-Videos-Downloader-114517862
+
+▶️ Download & Upload Models Tutorial (wget) : https://youtu.be/X5WVZ0NMaTg
+
+▶️ CausVid LoRA V2 Tutorial : https://youtu.be/1rAwZv0hEcU
+
+▶️ CausVid Main Tutorial : https://youtu.be/fTzlQ0tjxj0
+
+▶️ SwarmUI Master Tutorial : https://youtu.be/HKX8_F1Er_w
+
+🔗 SECourses Official Discord 10500+ Members ⤵️
+▶️ https://discord.com/servers/software-engineering-courses-secourses-772774097734074388
+
+🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️
+▶️ https://github.com/FurkanGozukara/Stable-Diffusion
+
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Skk8bbK5csM1q4MjfbrJw.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/CDBvyEbXYOIT9gQb9Kb95.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Wr2Z0VFwQvf-pNlBREmUd.png'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'eyado1976', 'Fishtiks', 'vnanhtuan', 'TahirC'], 'count': 5}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666'], 'count': 2}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'vnanhtuan'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '❤️', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-06-10 23:40:46,2025-06-10 23:40:46.407,[],/posts/MonsterMMORPG/478306591921253,2052,"{'language': 'en', 'probability': 0.7635219693183899}",0
+https://cdn-avatars.huggingface.co/v1/production/uploads/63b7a3dae7bf3e5be25f057c/2qIYvus1sS6xiZsyT-OVy.jpeg,2.0,Matthew Rogers,RamboRogers,508716602855303,"[{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MCP is simple. 
Basically, the LLM is passed a tool list in JSON; on the LLM's output there is a listener watching the generated text for tool-call syntax. When a tool call appears, it is buffered, executed, and the result is reflected back into the LLM as if the user had pressed enter in chat. That is how MCP works.'}]","
+MCP is simple. Basically, the LLM is passed a tool list in JSON; on the LLM's output there is a listener watching the generated text for tool-call syntax. When a tool call appears, it is buffered, executed, and the result is reflected back into the LLM as if the user had pressed enter in chat. That is how MCP works.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63b7a3dae7bf3e5be25f057c/y__44TVNZbWGZQDdBRilY.png'}]",[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-06-10 16:26:29,2025-06-10 16:27:10.230,[],/posts/RamboRogers/508716602855303,256,"{'language': 'en', 'probability': 0.954230546951294}",0
+https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,888164728339934,"[{'type': 'text', 'value': 'MCP just hit a tipping point:', 'raw': 'MCP just hit a tipping point:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'mention', 'user': 'hf', 'raw': '@hf'}, {'type': 'text', 'value': '.co made it dead simple: just type ""hf.co/mcp"" in your chat. No JSON wrestling, no config files.', 'raw': '.co made it dead simple: just type ""hf.co/mcp"" in your chat. No JSON wrestling, no config files.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Meanwhile, OpenAI, Google, and Microsoft all adopted it as their standard.', 'raw': '- Meanwhile, OpenAI, Google, and Microsoft all adopted it as their standard.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/fdaudens/mcp-ai-industry-standard', 'raw': 'https://huggingface.co/blog/fdaudens/mcp-ai-industry-standard'}]","MCP just hit a tipping point:
+- @hf.co made it dead simple: just type ""hf.co/mcp"" in your chat. No JSON wrestling, no config files.
+- Meanwhile, OpenAI, Google, and Microsoft all adopted it as their standard. 
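To make the tool-call loop described in the RamboRogers post above concrete, here is a minimal, self-contained sketch. `call_llm` is a hypothetical stand-in that replays canned outputs, and the `<tool_call>` syntax is illustrative, not the exact MCP wire format:

```python
# Minimal sketch of the MCP-style loop: tool list in, listener on the output,
# tool result reflected back in. All names and syntax here are illustrative.
import json, re

TOOLS = [{"name": "get_time", "description": "Return the current UTC time"}]

_canned = iter([
    '<tool_call>{"name": "get_time", "arguments": {}}</tool_call>',
    "It is 2025-01-01T00:00:00Z.",
])

def call_llm(messages):
    # Hypothetical stand-in for a real model call; it replays canned outputs
    # so the loop below runs end to end.
    return next(_canned)

def run_turn(user_input):
    # Step 1: the LLM is passed the tool list as JSON.
    messages = [
        {"role": "system", "content": "Tools available: " + json.dumps(TOOLS)},
        {"role": "user", "content": user_input},
    ]
    while True:
        output = call_llm(messages)
        # Step 2: the "listener" scans the generated text for tool-call syntax.
        m = re.search(r"<tool_call>(.*?)</tool_call>", output, re.S)
        if not m:
            return output  # no tool requested: this is the final answer
        call = json.loads(m.group(1))                    # buffered tool call
        result = {"tool": call["name"], "time": "2025-01-01T00:00:00Z"}
        # Step 3: reflect the result back in, as if the user pressed enter.
        messages.append({"role": "assistant", "content": output})
        messages.append({"role": "tool", "content": json.dumps(result)})

print(run_turn("What time is it?"))
```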
+ +https://huggingface.co/blog/fdaudens/mcp-ai-industry-standard","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/WZdvIKaHUsqkbkbZcTFm2.mp4'}]","[{'_id': '61bab1f61ca95cff16cbb98e', 'avatarUrl': '/avatars/71115043624aabb9a0740f83fb76d6c0.svg', 'fullname': 'huggingface', 'name': 'hf', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '🔥', 'users': ['lunarflu', 'John6666', 'ZhengPeng7', 'kwanisiri'], 'count': 4}, {'reaction': '🚀', 'users': ['lunarflu'], 'count': 1}, {'reaction': '🤗', 'users': ['lunarflu'], 'count': 1}]",2025-06-10 15:57:23,2025-06-11 16:49:28.442,"[{'_id': '67c0705a55eb73818a9954f5', 'avatarUrl': '/avatars/d72ffd6035e7ad8dfe2cccc92047c920.svg', 'fullname': 'brian james', 'name': 'kwanisiri', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/fdaudens/888164728339934,1828,"{'language': 'en', 'probability': 0.7751436233520508}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,453763403358138,"[{'type': 'text', 'value': 'Lingshu 🩺📖 medical MLLM released by DAMO Alibaba', 'raw': 'Lingshu 🩺📖 medical MLLM released by DAMO Alibaba'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'lingshu-medical-mllm/lingshu-mllms-6847974ca5b5df750f017dad'}, 'url': 'https://huggingface.co/collections/lingshu-medical-mllm/lingshu-mllms-6847974ca5b5df750f017dad', 'raw': 'https://huggingface.co/collections/lingshu-medical-mllm/lingshu-mllms-6847974ca5b5df750f017dad'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 7B/32B', 'raw': '✨ 7B/32B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 12+ imaging modalities supported: X-Ray, CT, MRI, Microscopy +more', 'raw': '✨ 12+ imaging modalities supported: X-Ray, CT, MRI, Microscopy +more'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Great performance on medical benchmark', 'raw': '✨ Great performance on medical benchmark'}]","Lingshu 🩺📖 medical MLLM released by DAMO Alibaba + +https://huggingface.co/collections/lingshu-medical-mllm/lingshu-mllms-6847974ca5b5df750f017dad + +✨ 7B/32B +✨ 12+ imaging modalities supported: X-Ray, CT, MRI, Microscopy +more +✨ Great performance on medical benchmark","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/7jd8K_Kuk0z22nbUoWNn-.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'Doctor-Chad-PhD', 'upgraedd'], 'count': 3}]",2025-06-10 15:49:32,2025-06-10 15:49:32.745,[],/posts/AdinaY/453763403358138,1592,"{'language': 'en', 'probability': 0.6643010377883911}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620ec53f7c959335c0c65e4b/zYyp99Ufu5HAKFfzLvz95.png,87.0,Francisco Aranda,frascuchon,354181781233320,"[{'type': 'text', 'value': ""Unlock the full potential of your datasets with SHEETS! It's incredibly easy to extend existing datasets and unlock new insights."", 'raw': ""Unlock the full potential of your datasets with SHEETS! 
It's incredibly easy to extend existing datasets and unlock new insights.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Leverage open-source models to translate, summarize, classify, and more - all directly within your existing columns.', 'raw': 'Leverage open-source models to translate, summarize, classify, and more - all directly within your existing columns.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ready to give it a try? Explore the possibilities here: ', 'raw': 'Ready to give it a try? Explore the possibilities here: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'aisheets/sheets'}, 'url': 'https://huggingface.co/spaces/aisheets/sheets', 'raw': 'https://huggingface.co/spaces/aisheets/sheets'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Unlock the full potential of your datasets with SHEETS! It's incredibly easy to extend existing datasets and unlock new insights. + +Leverage open-source models to translate, summarize, classify, and more - all directly within your existing columns. + +Ready to give it a try? Explore the possibilities here: https://huggingface.co/spaces/aisheets/sheets ",[],[],"[{'reaction': '🚀', 'users': ['John6666', 'dvilasuero', 'Fishtiks'], 'count': 3}]",2025-06-10 15:36:53,2025-06-11 07:40:05.708,"[{'_id': '6505a4fad3219dc63c6df764', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6505a4fad3219dc63c6df764/8fye3ukToyDMVOmStxirM.png', 'fullname': 'Arvind Kumar Bhardwaj', 'name': 'arvind3', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '620ec53f7c959335c0c65e4b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/620ec53f7c959335c0c65e4b/zYyp99Ufu5HAKFfzLvz95.png', 'fullname': 'Francisco Aranda', 'name': 'frascuchon', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 87, 'isFollowing': False}]",/posts/frascuchon/354181781233320,1308,"{'language': 'en', 'probability': 0.8723853230476379}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YdisMmTuCDj0gVK2TDml7.jpeg,3.0,EricSondhi,sondhiArm,464787657788756,"[{'type': 'text', 'value': 'Accelerating DLRMv2 Inference on Arm Neoverse CPUs with PyTorch', 'raw': 'Accelerating DLRMv2 Inference on Arm Neoverse CPUs with PyTorch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Join Annop Wongwathanarat (', 'raw': 'Join Annop Wongwathanarat ('}, {'type': 'mention', 'user': 'annop-w', 'raw': '@annop-w'}, {'type': 'text', 'value': '), Arm Principal Software Engineer for a webinar on performance optimization and ML acceleration on Arm-based systems. ', 'raw': '), Arm Principal Software Engineer for a webinar on performance optimization and ML acceleration on Arm-based systems. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'June 12 @ 12:00 pm - 1:00 pm (PST)', 'raw': 'June 12 @ 12:00 pm - 1:00 pm (PST)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Register here:', 'raw': 'Register here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://pytorch.org/event/accelerating-dlrmv2-inference-on-arm-neoverse-cpus-with-pytorch/', 'raw': 'https://pytorch.org/event/accelerating-dlrmv2-inference-on-arm-neoverse-cpus-with-pytorch/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Accelerating DLRMv2 Inference on Arm Neoverse CPUs with PyTorch + +Join Annop Wongwathanarat (@annop-w), Arm Principal Software Engineer for a webinar on performance optimization and ML acceleration on Arm-based systems. + +June 12 @ 12:00 pm - 1:00 pm (PST) + +Register here: +https://pytorch.org/event/accelerating-dlrmv2-inference-on-arm-neoverse-cpus-with-pytorch/ + + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6682a9252200824e2ddc667f/OGIaaGpnTK7uvRDtKrKsi.jpeg'}]","[{'_id': '664f1dffdea6b8ecf1aecc75', 'avatarUrl': '/avatars/09777454fca7cda677d280ebb22ba532.svg', 'fullname': 'Annop Wongwathanarat', 'name': 'annop-w', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-06-10 15:26:09,2025-06-10 15:27:13.277,[],/posts/sondhiArm/464787657788756,180,"{'language': 'en', 'probability': 0.7756890058517456}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62ecdc18b72a69615d6bd857/qAHhWJbSsmoezFHiErBUT.png,434.0,Daniel Han-Chen,danielhanchen,426556210957370,"[{'type': 'text', 'value': 'Mistral releases Magistral, their new reasoning models! 🔥', 'raw': 'Mistral releases Magistral, their new reasoning models! 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GGUFs to run: ', 'raw': 'GGUFs to run: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'unsloth/Magistral-Small-2506-GGUF'}, 'url': 'https://huggingface.co/unsloth/Magistral-Small-2506-GGUF', 'raw': 'https://huggingface.co/unsloth/Magistral-Small-2506-GGUF'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Magistral-Small-2506 excels at mathematics and coding.', 'raw': 'Magistral-Small-2506 excels at mathematics and coding.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can run the 24B model locally with just 32GB RAM by using our Dynamic GGUFs.', 'raw': 'You can run the 24B model locally with just 32GB RAM by using our Dynamic GGUFs.'}]","Mistral releases Magistral, their new reasoning models! 🔥 +GGUFs to run: https://huggingface.co/unsloth/Magistral-Small-2506-GGUF + +Magistral-Small-2506 excels at mathematics and coding. 
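As a hedged aside on what running the GGUFs linked above looks like in practice, one option is llama-cpp-python; the quant filename pattern below is an assumption, so pick an actual file from the repo:

```python
# Sketch assuming llama-cpp-python; "*Q4_K_M.gguf" is an assumed quant glob,
# substitute any filename that actually exists in the GGUF repo.
from llama_cpp import Llama

llm = Llama.from_pretrained(
    repo_id="unsloth/Magistral-Small-2506-GGUF",
    filename="*Q4_K_M.gguf",
    n_ctx=8192,
)
out = llm.create_chat_completion(
    messages=[{"role": "user", "content": "Prove that sqrt(2) is irrational."}]
)
print(out["choices"][0]["message"]["content"])
```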
+ +You can run the 24B model locally with just 32GB RAM by using our Dynamic GGUFs.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62ecdc18b72a69615d6bd857/fOEXgf97a_Wu0atSfJds6.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['shimmyshimmer', 'John6666', 's-emanuilov', 'moken78', 'Chunte', 'pabloce', 'porzione'], 'count': 7}, {'reaction': '🤗', 'users': ['shimmyshimmer', 'Chunte'], 'count': 2}, {'reaction': '🚀', 'users': ['shimmyshimmer'], 'count': 1}]",2025-06-10 15:01:23,2025-06-10 15:01:23.502,[],/posts/danielhanchen/426556210957370,1920,"{'language': 'en', 'probability': 0.8193451762199402}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63e27f0f1f963b8f20f4a10d/n9KcVAzZDfymP9j_jpTRc.jpeg,90.0,Ame Vi,Ameeeee,853347814425359,"[{'type': 'text', 'value': 'With Sheets, try a new way to create structured content with the help of AI!', 'raw': 'With Sheets, try a new way to create structured content with the help of AI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'No installs. No login. Just open a link and 🤩', 'raw': 'No installs. No login. Just open a link and 🤩'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This app lets you create a dataset by importing a file or starting from a prompt.', 'raw': 'This app lets you create a dataset by importing a file or starting from a prompt.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What’s different about SHEETS?', 'raw': 'What’s different about SHEETS?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔎 Web search integration to ground answers in real-world data', 'raw': '🔎 Web search integration to ground answers in real-world data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 In-context learning from validated sources', 'raw': '📚 In-context learning from validated sources'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Transparent sourcing — every result is linked', 'raw': '🔗 Transparent sourcing — every result is linked'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧩 Runs on multiple open-source models', 'raw': '🧩 Runs on multiple open-source models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fight hallucinations and start creating content you can rely on.', 'raw': 'Fight hallucinations and start creating content you can rely on.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","With Sheets, try a new way to create structured content with the help of AI! + +No installs. No login. Just open a link and 🤩 + +This app lets you create a dataset by importing a file or starting from a prompt. + +What’s different about SHEETS? +🔎 Web search integration to ground answers in real-world data +📚 In-context learning from validated sources +🔗 Transparent sourcing — every result is linked +🧩 Runs on multiple open-source models + +Fight hallucinations and start creating content you can rely on. 
+ +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63e27f0f1f963b8f20f4a10d/TO3MvNyBLKYj8HvUfNlgs.mp4'}]",[],"[{'reaction': '🚀', 'users': ['dvilasuero', 'John6666', 'OmbelineM'], 'count': 3}, {'reaction': '❤️', 'users': ['dvilasuero', 'arannindia'], 'count': 2}, {'reaction': '🧠', 'users': ['dvilasuero'], 'count': 1}, {'reaction': '🔥', 'users': ['nyuuzyou'], 'count': 1}]",2025-06-10 14:44:56,2025-06-10 14:44:56.628,[],/posts/Ameeeee/853347814425359,1750,"{'language': 'en', 'probability': 0.8625073432922363}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg,334.0,Daniel Vila,dvilasuero,324662497616161,"[{'type': 'text', 'value': 'Super excited to launch Hugging Face Sheets: Spreadsheets meet AI and unstructured data.', 'raw': 'Super excited to launch Hugging Face Sheets: Spreadsheets meet AI and unstructured data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A few months ago, we started imagining new ways to build and transform datasets with the latest open-source models.', 'raw': 'A few months ago, we started imagining new ways to build and transform datasets with the latest open-source models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Today, I'm thrilled to introduce our first step in this direction."", 'raw': ""Today, I'm thrilled to introduce our first step in this direction.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In a nutshell:', 'raw': 'In a nutshell:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Effortlessly run prompts and models over your data.', 'raw': '📁 Effortlessly run prompts and models over your data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Agentic search for accuracy and real-time information.', 'raw': '🌐 Agentic search for accuracy and real-time information.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖼️ Familiar, minimalistic interface for interacting with data.', 'raw': '🖼️ Familiar, minimalistic interface for interacting with data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Human feedback 2.0: Your input directly improves generated data.', 'raw': '🎯 Human feedback 2.0: Your input directly improves generated data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💯 Access hundreds of open models and leading inference providers.', 'raw': '💯 Access hundreds of open models and leading inference providers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Go to this space to try it out! ', 'raw': 'Go to this space to try it out! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'aisheets/sheets'}, 'url': 'https://huggingface.co/spaces/aisheets/sheets', 'raw': 'https://huggingface.co/spaces/aisheets/sheets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Leave your questions below, we're just getting started!"", 'raw': ""Leave your questions below, we're just getting started!""}]","Super excited to launch Hugging Face Sheets: Spreadsheets meet AI and unstructured data. 
+ +A few months ago, we started imagining new ways to build and transform datasets with the latest open-source models. + +Today, I'm thrilled to introduce our first step in this direction. + + +In a nutshell: + +📁 Effortlessly run prompts and models over your data. +🌐 Agentic search for accuracy and real-time information. +🖼️ Familiar, minimalistic interface for interacting with data. +🎯 Human feedback 2.0: Your input directly improves generated data. +💯 Access hundreds of open models and leading inference providers. + +Go to this space to try it out! + +https://huggingface.co/spaces/aisheets/sheets + +Leave your questions below, we're just getting started!",[],[],"[{'reaction': '🔥', 'users': ['jeffboudier', 'Ameeeee', 'John6666', 'catyung', 'drwlf', 'Fishtiks', 'azettl', 'frascuchon', 'ai-practitioner', 'nz-nz'], 'count': 10}, {'reaction': '❤️', 'users': ['jeffboudier', 'Ameeeee', 'seanswyi', 'frascuchon', 'ai-practitioner'], 'count': 5}, {'reaction': '😎', 'users': ['jeffboudier', 'Ameeeee', 'frascuchon'], 'count': 3}, {'reaction': '🚀', 'users': ['ai-practitioner'], 'count': 1}]",2025-06-10 14:32:23,2025-06-16 10:05:25.878,"[{'_id': '5f75a4c1f1e7ef6e919a1db3', 'avatarUrl': '/avatars/1daf80673c9297560d8a7004b894b2be.svg', 'fullname': 'Sean Seok-Won Yi', 'name': 'seanswyi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '60420dccc15e823a685f2b03', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg', 'fullname': 'Daniel Vila', 'name': 'dvilasuero', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 334, 'isFollowing': False}]",/posts/dvilasuero/324662497616161,2578,"{'language': 'en', 'probability': 0.8291881680488586}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg,3949.0,Victor Mustar,victor,273066327277259,"[{'type': 'text', 'value': 'Open Source Avengers, Assemble! Ask an expert AI agent team to solve complex problems together 🔥', 'raw': 'Open Source Avengers, Assemble! Ask an expert AI agent team to solve complex problems together 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Consilium brings together multiple agents that debate and use live research (web, arXiv, SEC) to reach a consensus. You set the strategy, they find the answer.', 'raw': 'Consilium brings together multiple agents that debate and use live research (web, arXiv, SEC) to reach a consensus. You set the strategy, they find the answer.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Credit to ', 'raw': 'Credit to '}, {'type': 'mention', 'user': 'azettl', 'raw': '@azettl'}, {'type': 'text', 'value': ' for this awesome demo: ', 'raw': ' for this awesome demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Agents-MCP-Hackathon/consilium_mcp'}, 'url': 'https://huggingface.co/spaces/Agents-MCP-Hackathon/consilium_mcp', 'raw': 'https://huggingface.co/spaces/Agents-MCP-Hackathon/consilium_mcp'}, {'type': 'new_line', 'raw': '\n'}]","Open Source Avengers, Assemble! Ask an expert AI agent team to solve complex problems together 🔥 + +Consilium brings together multiple agents that debate and use live research (web, arXiv, SEC) to reach a consensus. You set the strategy, they find the answer. 
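Purely as a conceptual sketch (not the Space's actual code), the debate-to-consensus flow described above can be pictured like this, with `ask` standing in for per-agent model calls plus their research tools:

```python
# Entirely hypothetical sketch of a debate-to-consensus loop; `ask` stands in
# for a per-agent model call that may also hit web/arXiv/SEC research tools.
def ask(agent: str, question: str, transcript: list) -> str:
    # Placeholder answer; a real agent would argue, cite sources, and react
    # to what the other experts have said so far (the transcript).
    return f"{agent}: tentative answer to {question!r}"

def consilium(question: str, agents=("economist", "engineer", "lawyer"),
              rounds: int = 3) -> list:
    transcript: list = []
    for _ in range(rounds):
        answers = [ask(a, question, transcript) for a in agents]
        transcript.extend(answers)
        # Naive consensus check: stop early once all agents converge.
        if len({a.split(": ", 1)[1] for a in answers}) == 1:
            break
    return answers  # final round's positions, ready to synthesize

print(consilium("Should we adopt model X for production?"))
```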
+
+Credit to @azettl for this awesome demo: https://huggingface.co/spaces/Agents-MCP-Hackathon/consilium_mcp
+","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/N4AKe8f6FUh2SGPWfD-Rd.mp4'}]","[{'_id': '65ba36f30659ad04d028d083', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65ba36f30659ad04d028d083/oAlEn8_Aa-sO2oq2Uj8eH.jpeg', 'fullname': 'Andreas', 'name': 'azettl', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 25}]","[{'reaction': '❤️', 'users': ['azettl', 'John6666', 'pierrci', 'kanddle', 'AIMaster7'], 'count': 5}, {'reaction': '🤗', 'users': ['John6666'], 'count': 1}]",2025-06-10 12:40:06,2025-06-10 14:58:23.201,"[{'_id': '65ba36f30659ad04d028d083', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65ba36f30659ad04d028d083/oAlEn8_Aa-sO2oq2Uj8eH.jpeg', 'fullname': 'Andreas', 'name': 'azettl', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 25, 'isFollowing': False}, {'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}]",/posts/victor/273066327277259,2449,"{'language': 'en', 'probability': 0.816868007183075}",2
+https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/mrB2AFA3K9g3gJz-WxxjH.jpeg,2.0,kanaria007,kanaria007,256062689432737,"[{'type': 'text', 'value': '✅ New Article on Hugging Face: Structural Intelligence and Cognitive Theory', 'raw': '✅ New Article on Hugging Face: Structural Intelligence and Cognitive Theory'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Title:', 'raw': 'Title:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📘 Structural Intelligence Protocols: Implementation Results and Cognitive Theory Correspondence', 'raw': '📘 Structural Intelligence Protocols: Implementation Results and Cognitive Theory Correspondence'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Read the article here: ', 'raw': '🔗 Read the article here: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/kanaria007/agi-protocol-cognitive-match', 'raw': 'https://huggingface.co/blog/kanaria007/agi-protocol-cognitive-match'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Summary:', 'raw': 'Summary:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This article presents follow-up results from the structural intelligence protocol project. It documents how a prompt-based architecture induces reproducible reasoning structures in GPT-4o, Claude, and Gemini — with observed behavioral parallels to theories in metacognition, working memory, and moral reasoning.', 'raw': 'This article presents follow-up results from the structural intelligence protocol project. 
It documents how a prompt-based architecture induces reproducible reasoning structures in GPT-4o, Claude, and Gemini — with observed behavioral parallels to theories in metacognition, working memory, and moral reasoning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Surprisingly, this framework aligns with cognitive science research — not by design, but by emergence from structural implementation.', 'raw': 'Surprisingly, this framework aligns with cognitive science research — not by design, but by emergence from structural implementation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It suggests a protocol-driven, reproducible route to AGI research that is compatible with current LLM platforms.', 'raw': 'It suggests a protocol-driven, reproducible route to AGI research that is compatible with current LLM platforms.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Resources:', 'raw': 'Resources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\t🧠 Protocol Dataset: ', 'raw': '\t•\t🧠 Protocol Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'kanaria007/agi-structural-intelligence-protocols'}, 'url': 'https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols', 'raw': 'https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\t📘 FAQ and validation logs included', 'raw': '\t•\t📘 FAQ and validation logs included'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\t🧪 Platform-tested: GPT-4o, Claude Sonnet 4, Gemini 2.5 Flash', 'raw': '\t•\t🧪 Platform-tested: GPT-4o, Claude Sonnet 4, Gemini 2.5 Flash'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Relevant to those exploring:', 'raw': 'Relevant to those exploring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tStructural cognitive architectures', 'raw': '\t•\tStructural cognitive architectures'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tMetacognition in LLMs', 'raw': '\t•\tMetacognition in LLMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tProtocol-based AGI frameworks', 'raw': '\t•\tProtocol-based AGI frameworks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tAlignment through educational design', 'raw': '\t•\tAlignment through educational design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tTheory-practice connections in AI cognition', 'raw': '\t•\tTheory-practice connections in AI cognition'}]","✅ New Article on Hugging Face: Structural Intelligence and Cognitive Theory + +Title: +📘 Structural Intelligence Protocols: Implementation Results and Cognitive Theory Correspondence +🔗 Read the article here: https://huggingface.co/blog/kanaria007/agi-protocol-cognitive-match + +Summary: +This article presents follow-up results from the structural intelligence protocol project. It documents how a prompt-based architecture induces reproducible reasoning structures in GPT-4o, Claude, and Gemini — with observed behavioral parallels to theories in metacognition, working memory, and moral reasoning. + +Surprisingly, this framework aligns with cognitive science research — not by design, but by emergence from structural implementation. 
+It suggests a protocol-driven, reproducible route to AGI research that is compatible with current LLM platforms. + +Resources: + • 🧠 Protocol Dataset: https://huggingface.co/datasets/kanaria007/agi-structural-intelligence-protocols + • 📘 FAQ and validation logs included + • 🧪 Platform-tested: GPT-4o, Claude Sonnet 4, Gemini 2.5 Flash + +Relevant to those exploring: + • Structural cognitive architectures + • Metacognition in LLMs + • Protocol-based AGI frameworks + • Alignment through educational design + • Theory-practice connections in AI cognition",[],[],"[{'reaction': '🧠', 'users': ['John6666'], 'count': 1}]",2025-06-05 14:04:19,2025-06-05 14:04:19.186,[],/posts/kanaria007/256062689432737,278,"{'language': 'en', 'probability': 0.8043763637542725}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,854108171347548,"[{'type': 'text', 'value': 'OpenAudio S1-mini 🔊 a new OPEN multilingual TTS model trained on 2M+ hours of data, by FishAudio', 'raw': 'OpenAudio S1-mini 🔊 a new OPEN multilingual TTS model trained on 2M+ hours of data, by FishAudio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'fishaudio/openaudio-s1-mini'}, 'url': 'https://huggingface.co/fishaudio/openaudio-s1-mini', 'raw': 'https://huggingface.co/fishaudio/openaudio-s1-mini'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Supports 14 languages', 'raw': '✨ Supports 14 languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 50+ emotions & tones', 'raw': '✨ 50+ emotions & tones'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ RLHF-optimized', 'raw': '✨ RLHF-optimized'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Special effects: laughing, crying, shouting, etc.', 'raw': '✨ Special effects: laughing, crying, shouting, etc.'}, {'type': 'new_line', 'raw': '\n'}]","OpenAudio S1-mini 🔊 a new OPEN multilingual TTS model trained on 2M+ hours of data, by FishAudio + +https://huggingface.co/fishaudio/openaudio-s1-mini + +✨ Supports 14 languages +✨ 50+ emotions & tones +✨ RLHF-optimized +✨ Special effects: laughing, crying, shouting, etc. 
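To try the checkpoint above locally, the files can be pulled with huggingface_hub and then run through the fish-speech tooling; exact inference commands vary by release, so treat this as a minimal fetch sketch and check the model card for the run step:

```python
# Minimal fetch sketch; inference itself goes through the fish-speech stack,
# whose CLI/API varies by version -- see the model card for current usage.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(repo_id="fishaudio/openaudio-s1-mini")
print("OpenAudio S1-mini files downloaded to:", local_dir)
```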
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/VyIzAkJZjdJqMdcmSdBfH.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['loubnabnl', 'BrigitteTousi', 'JackCloudman', 'trevon', 'owlmaniac', 'John6666'], 'count': 6}]",2025-06-05 08:18:27,2025-06-05 08:47:44.063,"[{'_id': '67daa1ace464cecf6df3dc8e', 'avatarUrl': '/avatars/4165f710c627910f9e0fb903b345afa6.svg', 'fullname': 'mehran', 'name': 'diginoron', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/AdinaY/854108171347548,1620,"{'language': 'en', 'probability': 0.6923365592956543}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/H5ncB4vaBtP8GVCidgxL0.png,242.0,seawolf,seawolf2357,221417890817416,"[{'type': 'text', 'value': '🚀 Just Found an Interesting New Leaderboard for Medical AI Evaluation!', 'raw': '🚀 Just Found an Interesting New Leaderboard for Medical AI Evaluation!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I recently stumbled upon a medical domain-specific FACTS Grounding leaderboard on Hugging Face, and the approach to evaluating AI accuracy in medical contexts is quite impressive, so I thought I'd share."", 'raw': ""I recently stumbled upon a medical domain-specific FACTS Grounding leaderboard on Hugging Face, and the approach to evaluating AI accuracy in medical contexts is quite impressive, so I thought I'd share.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 What is FACTS Grounding?', 'raw': '📊 What is FACTS Grounding?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's originally a benchmark developed by Google DeepMind that measures how well LLMs generate answers based solely on provided documents. What's cool about this medical-focused version is that it's designed to test even small open-source models."", 'raw': ""It's originally a benchmark developed by Google DeepMind that measures how well LLMs generate answers based solely on provided documents. 
What's cool about this medical-focused version is that it's designed to test even small open-source models.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏥 Medical Domain Version Features', 'raw': '🏥 Medical Domain Version Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '236 medical examples: Extracted from the original 860 examples', 'raw': '236 medical examples: Extracted from the original 860 examples'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tests small models like Qwen 3 1.7B: Great for resource-constrained environments', 'raw': 'Tests small models like Qwen 3 1.7B: Great for resource-constrained environments'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Uses Gemini 1.5 Flash for evaluation: Simplified to a single judge model', 'raw': 'Uses Gemini 1.5 Flash for evaluation: Simplified to a single judge model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📈 The Evaluation Method is Pretty Neat', 'raw': '📈 The Evaluation Method is Pretty Neat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Grounding Score: Are all claims in the response supported by the provided document?', 'raw': 'Grounding Score: Are all claims in the response supported by the provided document?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Quality Score: Does it properly answer the user's question?"", 'raw': ""Quality Score: Does it properly answer the user's question?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Combined Score: Did it pass both checks?', 'raw': 'Combined Score: Did it pass both checks?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Since medical information requires extreme accuracy, this thorough verification approach makes a lot of sense.', 'raw': 'Since medical information requires extreme accuracy, this thorough verification approach makes a lot of sense.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Check It Out Yourself', 'raw': '🔗 Check It Out Yourself'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The actual leaderboard: ', 'raw': 'The actual leaderboard: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MaziyarPanahi/FACTS-Leaderboard'}, 'url': 'https://huggingface.co/spaces/MaziyarPanahi/FACTS-Leaderboard', 'raw': 'https://huggingface.co/spaces/MaziyarPanahi/FACTS-Leaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💭 My thoughts: As medical AI continues to evolve, evaluation tools like this are becoming increasingly important. The fact that it can test smaller models is particularly helpful for the open-source community!', 'raw': '💭 My thoughts: As medical AI continues to evolve, evaluation tools like this are becoming increasingly important. The fact that it can test smaller models is particularly helpful for the open-source community!'}]","🚀 Just Found an Interesting New Leaderboard for Medical AI Evaluation! + +I recently stumbled upon a medical domain-specific FACTS Grounding leaderboard on Hugging Face, and the approach to evaluating AI accuracy in medical contexts is quite impressive, so I thought I'd share. + +📊 What is FACTS Grounding? 
+It's originally a benchmark developed by Google DeepMind that measures how well LLMs generate answers based solely on provided documents. What's cool about this medical-focused version is that it's designed to test even small open-source models. + +🏥 Medical Domain Version Features + +236 medical examples: Extracted from the original 860 examples +Tests small models like Qwen 3 1.7B: Great for resource-constrained environments +Uses Gemini 1.5 Flash for evaluation: Simplified to a single judge model + +📈 The Evaluation Method is Pretty Neat + +Grounding Score: Are all claims in the response supported by the provided document? +Quality Score: Does it properly answer the user's question? +Combined Score: Did it pass both checks? + +Since medical information requires extreme accuracy, this thorough verification approach makes a lot of sense. +🔗 Check It Out Yourself + +The actual leaderboard: https://huggingface.co/spaces/MaziyarPanahi/FACTS-Leaderboard + +💭 My thoughts: As medical AI continues to evolve, evaluation tools like this are becoming increasingly important. The fact that it can test smaller models is particularly helpful for the open-source community!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c3550d8cc87cf0c06838e7/-6thW7l1pRh_NIVpPL3XS.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c3550d8cc87cf0c06838e7/vMFM7OH4PnfZmaT3IV98U.png'}]",[],"[{'reaction': '👍', 'users': ['seawolf2357', 'immunobiotech', 'diginoron', 'viidfedenwad', 'ginipick', 'openfree', 'uzvisa', 'John6666', 'shtormish', 'FluxAGI'], 'count': 10}, {'reaction': '🔥', 'users': ['seawolf2357', 'viidfedenwad', 'openfree', 'immunobiotech'], 'count': 4}, {'reaction': '🤗', 'users': ['seawolf2357', 'viidfedenwad'], 'count': 2}, {'reaction': '❤️', 'users': ['viidfedenwad'], 'count': 1}]",2025-06-05 07:45:55,2025-06-05 07:52:55.154,[],/posts/seawolf2357/221417890817416,1537,"{'language': 'en', 'probability': 0.912344217300415}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,143728924504132,"[{'type': 'text', 'value': ""EARLY SNEAK PREVIEW: get a first look at the Celestia 3 science-reasoning dataset, built with DeepSeek's newest R1-0528 reasoning model! Subjects include physics, chemistry, biology, computer science, Earth science, astronomy, and information theory."", 'raw': ""EARLY SNEAK PREVIEW: get a first look at the Celestia 3 science-reasoning dataset, built with DeepSeek's newest R1-0528 reasoning model! 
Subjects include physics, chemistry, biology, computer science, Earth science, astronomy, and information theory.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This early look contains the first 14k rows, all synthetic responses using ', 'raw': 'This early look contains the first 14k rows, all synthetic responses using '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'deepseek-ai/DeepSeek-R1-0528'}, 'url': 'https://huggingface.co/deepseek-ai/DeepSeek-R1-0528', 'raw': 'https://huggingface.co/deepseek-ai/DeepSeek-R1-0528'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SEE IT HERE: ', 'raw': 'SEE IT HERE: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'sequelbox/Celestia3-DeepSeek-R1-0528-PREVIEW'}, 'url': 'https://huggingface.co/datasets/sequelbox/Celestia3-DeepSeek-R1-0528-PREVIEW', 'raw': 'https://huggingface.co/datasets/sequelbox/Celestia3-DeepSeek-R1-0528-PREVIEW'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Support our releases: ', 'raw': 'Support our releases: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'sequelbox/SupportOpenSource'}, 'url': 'https://huggingface.co/spaces/sequelbox/SupportOpenSource', 'raw': 'https://huggingface.co/spaces/sequelbox/SupportOpenSource'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Coming up we'll have more dataset releases, including some novel reasoning and analysis methods - we think an important role for open source researchers is experimenting with new response styles on top of the increasingly excellent base models available to finetune."", 'raw': ""Coming up we'll have more dataset releases, including some novel reasoning and analysis methods - we think an important role for open source researchers is experimenting with new response styles on top of the increasingly excellent base models available to finetune.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'more to come soon!', 'raw': 'more to come soon!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'allegra', 'raw': 'allegra'}]","EARLY SNEAK PREVIEW: get a first look at the Celestia 3 science-reasoning dataset, built with DeepSeek's newest R1-0528 reasoning model! Subjects include physics, chemistry, biology, computer science, Earth science, astronomy, and information theory. + +This early look contains the first 14k rows, all synthetic responses using https://huggingface.co/deepseek-ai/DeepSeek-R1-0528 + +SEE IT HERE: https://huggingface.co/datasets/sequelbox/Celestia3-DeepSeek-R1-0528-PREVIEW + +Support our releases: https://huggingface.co/spaces/sequelbox/SupportOpenSource + +Coming up we'll have more dataset releases, including some novel reasoning and analysis methods - we think an important role for open source researchers is experimenting with new response styles on top of the increasingly excellent base models available to finetune. + +more to come soon! 
+allegra",[],[],"[{'reaction': '🔥', 'users': ['sometimesanotion', 'BrigitteTousi', 'John6666'], 'count': 3}, {'reaction': '🚀', 'users': ['zoeywin'], 'count': 1}]",2025-06-05 03:29:39,2025-06-05 03:29:39.836,[],/posts/sequelbox/143728924504132,1096,"{'language': 'en', 'probability': 0.8469672203063965}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1667626701210-noauth.jpeg,3.0,Apurva Mishra,mav3ri3k,289886358161842,"[{'type': 'text', 'value': 'First time training flow matching model in jax.', 'raw': 'First time training flow matching model in jax.'}]",First time training flow matching model in jax.,"[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6365fc2698da81987e2a26af/bgGdKzk81VH5y54_1NzwY.mp4'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-06-05 01:44:36,2025-06-05 01:44:36.216,[],/posts/mav3ri3k/289886358161842,283,"{'language': 'en', 'probability': 0.8873348236083984}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png,5900.0,Joshua,Xenova,927328273503233,"[{'type': 'text', 'value': 'NEW: Real-time conversational AI models can now run 100% locally in your browser! 🤯', 'raw': 'NEW: Real-time conversational AI models can now run 100% locally in your browser! 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔐 Privacy by design (no data leaves your device)', 'raw': '🔐 Privacy by design (no data leaves your device)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💰 Completely free... forever', 'raw': '💰 Completely free... forever'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📦 Zero installation required, just visit a website', 'raw': '📦 Zero installation required, just visit a website'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡️ Blazingly-fast WebGPU-accelerated inference', 'raw': '⚡️ Blazingly-fast WebGPU-accelerated inference'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out: ', 'raw': 'Try it out: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'webml-community/conversational-webgpu'}, 'url': 'https://huggingface.co/spaces/webml-community/conversational-webgpu', 'raw': 'https://huggingface.co/spaces/webml-community/conversational-webgpu'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""For those interested, here's how it works:"", 'raw': ""For those interested, here's how it works:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Silero VAD for voice activity detection', 'raw': '- Silero VAD for voice activity detection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Whisper for speech recognition', 'raw': '- Whisper for speech recognition'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SmolLM2-1.7B for text generation', 'raw': '- SmolLM2-1.7B for text generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Kokoro for text to speech', 'raw': '- Kokoro for text to speech'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Powered by Transformers.js and ONNX Runtime Web! 🤗 I hope you like it!', 'raw': 'Powered by Transformers.js and ONNX Runtime Web! 🤗 I hope you like it!'}]","NEW: Real-time conversational AI models can now run 100% locally in your browser! 
🤯 + +🔐 Privacy by design (no data leaves your device) +💰 Completely free... forever +📦 Zero installation required, just visit a website +⚡️ Blazingly-fast WebGPU-accelerated inference + +Try it out: https://huggingface.co/spaces/webml-community/conversational-webgpu + +For those interested, here's how it works: +- Silero VAD for voice activity detection +- Whisper for speech recognition +- SmolLM2-1.7B for text generation +- Kokoro for text to speech + +Powered by Transformers.js and ONNX Runtime Web! 🤗 I hope you like it!","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61b253b7ac5ecaae3d1efe0c/PiN1qeIcbDouDAtf7nQfj.mp4'}]",[],"[{'reaction': '🔥', 'users': ['Poorguywork', 'FM-1976', 'nekomeowww', 'vaibhav297', 'toczkos', 'rasgaard', 'loubnabnl', 'kilian303', 'mrdbourke', 'Leamsigc', 'John6666', 'SignOfZeta', 'HariDangi', 'kagia'], 'count': 14}]",2025-06-04 18:54:57,2025-06-10 11:40:38.929,"[{'_id': '5eeec7e05e979253a010ee8f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1592707013900-noauth.jpeg', 'fullname': 'James David Morris', 'name': 'awokeknowing', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '63f157a82f7c0152e872fc5a', 'avatarUrl': '/avatars/6a33eac788fbd6c1f8efa6baf0dc87fb.svg', 'fullname': 'kilian lindberg', 'name': 'kilian303', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '660f89cc7daf5d7edafdcbe8', 'avatarUrl': '/avatars/a16113cd8effc672d12a4888b58d8164.svg', 'fullname': 'Gupta', 'name': 'Shikhu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Xenova/927328273503233,4593,"{'language': 'en', 'probability': 0.8573838472366333}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a7422854f1d0225b075bfc/XGYAcDPZG5ZEsNBWG6guw.jpeg,75.0,lhl,leonardlin,443397594136956,"[{'type': 'text', 'value': ""I'm excited to announce the official release of our Shisa V2 405B model:"", 'raw': ""I'm excited to announce the official release of our Shisa V2 405B model:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'shisa-ai/shisa-v2-llama3.1-405b'}, 'url': 'https://huggingface.co/shisa-ai/shisa-v2-llama3.1-405b', 'raw': 'https://huggingface.co/shisa-ai/shisa-v2-llama3.1-405b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's the strongest model ever trained in Japan, and even goes toe-to-toe w/ GPT-4o and DeepSeek-V3 in JA MT-Bench."", 'raw': ""It's the strongest model ever trained in Japan, and even goes toe-to-toe w/ GPT-4o and DeepSeek-V3 in JA MT-Bench.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For all the details, be sure to check out post and overview report here: ', 'raw': 'For all the details, be sure to check out post and overview report here: '}, {'type': 'link', 'href': 'https://shisa.ai/posts/shisa-v2-405b/', 'raw': 'https://shisa.ai/posts/shisa-v2-405b/'}]","I'm excited to announce the official release of our Shisa V2 405B model: +https://huggingface.co/shisa-ai/shisa-v2-llama3.1-405b + +It's the strongest model ever trained in Japan, and even goes toe-to-toe w/ GPT-4o and DeepSeek-V3 in JA MT-Bench. 
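As a rough server-side sketch of the conversational pipeline above: the browser demo itself runs on Transformers.js and ONNX Runtime Web, so this Python analogue of the Whisper and SmolLM2 stages is an assumption for illustration only, with the VAD and TTS stages omitted.

```python
# Rough Python analogue of the middle stages of the in-browser
# pipeline above (VAD -> Whisper -> SmolLM2 -> Kokoro); a sketch,
# not the demo's actual Transformers.js code.
from transformers import pipeline

asr = pipeline("automatic-speech-recognition", model="openai/whisper-tiny.en")
chat = pipeline("text-generation", model="HuggingFaceTB/SmolLM2-1.7B-Instruct")

user_text = asr("question.wav")["text"]              # speech -> text
messages = [{"role": "user", "content": user_text}]  # single-turn chat
out = chat(messages, max_new_tokens=128)
print(out[0]["generated_text"][-1]["content"])       # reply to hand to TTS
```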
+ +For all the details, be sure to check out post and overview report here: https://shisa.ai/posts/shisa-v2-405b/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a7422854f1d0225b075bfc/GFVCv_AnvwxxcRl5KZHjY.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a7422854f1d0225b075bfc/8GliM-JgT33ANEXrru8tI.png'}]",[],"[{'reaction': '🚀', 'users': ['Tonic', 'John6666'], 'count': 2}, {'reaction': '👍', 'users': ['Tonic'], 'count': 1}, {'reaction': '🧠', 'users': ['Tonic'], 'count': 1}]",2025-06-04 18:07:38,2025-06-04 18:07:38.748,[],/posts/leonardlin/443397594136956,353,"{'language': 'en', 'probability': 0.8541271686553955}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/651feacae355187fca1e5a37/Il-BoXkYQxQt_znxnE-ev.jpeg,1.0,Myrna Rau,Threatthriver,720491332395336,"[{'type': 'text', 'value': 'New Dataset Released ', 'raw': 'New Dataset Released '}]",New Dataset Released ,[],[],"[{'reaction': '😎', 'users': ['Threatthriver', 'John6666', 'heleue'], 'count': 3}]",2025-06-04 17:35:38,2025-06-04 17:35:38.398,[],/posts/Threatthriver/720491332395336,259,"{'language': 'en', 'probability': 0.8451709747314453}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,563069452538504,"[{'type': 'text', 'value': '🙋🏻\u200d♂️ hey there folks , ', 'raw': '🙋🏻\u200d♂️ hey there folks , '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So every bio/med/chem meeting i go to i always get the same questions ""why are you sharing a gdrive link with me for this?"" and ""Do you have any plans to publish your model weights and datasets on huggingface?"" and finally i got a good answer today which explains everything :', 'raw': 'So every bio/med/chem meeting i go to i always get the same questions ""why are you sharing a gdrive link with me for this?"" and ""Do you have any plans to publish your model weights and datasets on huggingface?"" and finally i got a good answer today which explains everything :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'basically there is some kind of government censorship on this (usa, but i\'m sure others too) and they are told they are not allowed as it is considered a ""dataleak"" which is illegal !!!! ', 'raw': 'basically there is some kind of government censorship on this (usa, but i\'m sure others too) and they are told they are not allowed as it is considered a ""dataleak"" which is illegal !!!! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'this is terrible ! but the good news is that we can do something about it !', 'raw': 'this is terrible ! 
but the good news is that we can do something about it !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'so there is this ""call for opinions and comments"" here from the NIH (usa) , and here we can make our opinion on this topic known : ', 'raw': 'so there is this ""call for opinions and comments"" here from the NIH (usa) , and here we can make our opinion on this topic known : '}, {'type': 'link', 'href': 'https://osp.od.nih.gov/comment-form-responsibly-developing-and-sharing-generative-artificial-intelligence-tools-using-nih-controlled-access-data/', 'raw': 'https://osp.od.nih.gov/comment-form-responsibly-developing-and-sharing-generative-artificial-intelligence-tools-using-nih-controlled-access-data/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'kindly consider dropping your opinion and thoughts about this censorship of science , and share this post , link or thoughts widely .', 'raw': 'kindly consider dropping your opinion and thoughts about this censorship of science , and share this post , link or thoughts widely .'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Together maybe we can start to share data and model weights appropriately and openly in a good way 🙏🏻🚀', 'raw': 'Together maybe we can start to share data and model weights appropriately and openly in a good way 🙏🏻🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'cc. ', 'raw': 'cc. '}, {'type': 'mention', 'user': 'cyrilzakka', 'raw': '@cyrilzakka'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","🙋🏻‍♂️ hey there folks , + +So every bio/med/chem meeting i go to i always get the same questions ""why are you sharing a gdrive link with me for this?"" and ""Do you have any plans to publish your model weights and datasets on huggingface?"" and finally i got a good answer today which explains everything : + +basically there is some kind of government censorship on this (usa, but i'm sure others too) and they are told they are not allowed as it is considered a ""dataleak"" which is illegal !!!! + +this is terrible ! but the good news is that we can do something about it ! + +so there is this ""call for opinions and comments"" here from the NIH (usa) , and here we can make our opinion on this topic known : https://osp.od.nih.gov/comment-form-responsibly-developing-and-sharing-generative-artificial-intelligence-tools-using-nih-controlled-access-data/ + +kindly consider dropping your opinion and thoughts about this censorship of science , and share this post , link or thoughts widely . + +Together maybe we can start to share data and model weights appropriately and openly in a good way 🙏🏻🚀 + +cc. 
@cyrilzakka + +",[],"[{'_id': '66ba71a4447411b9c0e19d71', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4f93ZrYdaKfK3F53IB51x.jpeg', 'fullname': 'Cyril', 'name': 'cyrilzakka', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 79}]","[{'reaction': '🤗', 'users': ['John6666'], 'count': 1}]",2025-06-04 16:56:34,2025-06-04 16:56:34.206,[],/posts/Tonic/563069452538504,592,"{'language': 'en', 'probability': 0.9411457180976868}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/34gmtYV8zc2Z_w7fhs_Tj.png,,Mary Harvety,mary14511,477657023598100,"[{'type': 'text', 'value': 'it keep asking me to login but my account is already login i am tired of this shit', 'raw': 'it keep asking me to login but my account is already login i am tired of this shit'}]",it keep asking me to login but my account is already login i am tired of this shit,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6831d3f21bdea85fad86ad8f/sL2l8iR1AvyAnvR11ukMl.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-06-04 15:52:15,2025-06-06 15:40:49.730,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/mary14511/477657023598100,206,"{'language': 'en', 'probability': 0.9599895477294922}",1 +/avatars/aa880f8154840d70c7ec7d75373b8a30.svg,18.0,Ruslan Vasilev,artnitolog,214780020212238,"[{'type': 'text', 'value': 'awesome-arXiv 🚀: ', 'raw': 'awesome-arXiv 🚀: '}, {'type': 'link', 'href': 'https://github.com/artnitolog/awesome-arxiv', 'raw': 'https://github.com/artnitolog/awesome-arxiv'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've just released awesome-arXiv, a curated collection of tools, libraries, datasets, and resources for discovering, reading, and automating your work with arXiv papers."", 'raw': ""I've just released awesome-arXiv, a curated collection of tools, libraries, datasets, and resources for discovering, reading, and automating your work with arXiv papers.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Feedback and contributions are welcomed!', 'raw': 'Feedback and contributions are welcomed!'}]","awesome-arXiv 🚀: https://github.com/artnitolog/awesome-arxiv + +I've just released awesome-arXiv, a curated collection of tools, libraries, datasets, and resources for discovering, reading, and automating your work with arXiv papers. + +Feedback and contributions are welcomed!",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-06-02 09:28:49,2025-06-02 09:28:49.707,[],/posts/artnitolog/214780020212238,222,"{'language': 'en', 'probability': 0.8762950301170349}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620ec53f7c959335c0c65e4b/zYyp99Ufu5HAKFfzLvz95.png,87.0,Francisco Aranda,frascuchon,853893475667585,"[{'type': 'text', 'value': 'Hey! I built RAG MCP Server Space, a simple Gradio MCP server for RAG systems that allows you to search relevant results without passing huge contexts to your LLM.', 'raw': 'Hey! 
I built RAG MCP Server Space, a simple Gradio MCP server for RAG systems that allows you to search relevant results without passing huge contexts to your LLM.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can use this space to integrate with your agents and improve the efficiency of your search results. Feel free to try it out and let me know if you have any feedback or questions!', 'raw': 'You can use this space to integrate with your agents and improve the efficiency of your search results. Feel free to try it out and let me know if you have any feedback or questions!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'frascuchon/rag-mcp-server'}, 'url': 'https://huggingface.co/spaces/frascuchon/rag-mcp-server', 'raw': 'https://huggingface.co/spaces/frascuchon/rag-mcp-server'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks for checking it out!', 'raw': 'Thanks for checking it out!'}, {'type': 'new_line', 'raw': '\n'}]","Hey! I built RAG MCP Server Space, a simple Gradio MCP server for RAG systems that allows you to search relevant results without passing huge contexts to your LLM. + +You can use this space to integrate with your agents and improve the efficiency of your search results. Feel free to try it out and let me know if you have any feedback or questions! + +https://huggingface.co/spaces/frascuchon/rag-mcp-server + +Thanks for checking it out! +",[],[],"[{'reaction': '👍', 'users': ['John6666', 'dvilasuero', 'stellaray777', 'ykarout', 'victor', 'Ameeeee'], 'count': 6}, {'reaction': '❤️', 'users': ['dvilasuero', 'stellaray777', 'dusty504', 'korarishi', 'Ameeeee'], 'count': 5}]",2025-06-02 08:22:45,2025-06-02 08:22:45.447,[],/posts/frascuchon/853893475667585,2990,"{'language': 'en', 'probability': 0.8314784169197083}",0 +/avatars/7e5b4b94d80d405026261fd723a4d1e8.svg,39.0,Dhruv,dhruv3006,943709012268825,"[{'type': 'text', 'value': 'App-Use : Create virtual desktops for AI agents to focus on specific apps.', 'raw': 'App-Use : Create virtual desktops for AI agents to focus on specific apps.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'App-Use lets you scope agents to just the apps they need. Instead of full desktop access, say ""only work with Safari and Notes"" or ""just control iPhone Mirroring"" - visual isolation without new processes for perfectly focused automation.', 'raw': 'App-Use lets you scope agents to just the apps they need. Instead of full desktop access, say ""only work with Safari and Notes"" or ""just control iPhone Mirroring"" - visual isolation without new processes for perfectly focused automation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Running computer-use on the entire desktop often causes agent hallucinations and loss of focus when they see irrelevant windows and UI elements. App-Use solves this by creating composited views where agents only see what matters, dramatically improving task completion accuracy', 'raw': 'Running computer-use on the entire desktop often causes agent hallucinations and loss of focus when they see irrelevant windows and UI elements. 
App-Use solves this by creating composited views where agents only see what matters, dramatically improving task completion accuracy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What you can build: Research agents working in Safari while writing agents draft in Notes, iPhone automation for messages and reminders, parallel testing across isolated app sessions, or teams of specialized agents working simultaneously without interference.', 'raw': 'What you can build: Research agents working in Safari while writing agents draft in Notes, iPhone automation for messages and reminders, parallel testing across isolated app sessions, or teams of specialized agents working simultaneously without interference.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Currently macOS-only (Quartz compositing engine). ', 'raw': 'Currently macOS-only (Quartz compositing engine). '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the full guide: ', 'raw': 'Read the full guide: '}, {'type': 'link', 'href': 'https://trycua.com/blog/app-use', 'raw': 'https://trycua.com/blog/app-use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Github : ', 'raw': 'Github : '}, {'type': 'link', 'href': 'https://github.com/trycua/cua', 'raw': 'https://github.com/trycua/cua'}]","App-Use : Create virtual desktops for AI agents to focus on specific apps. + +App-Use lets you scope agents to just the apps they need. Instead of full desktop access, say ""only work with Safari and Notes"" or ""just control iPhone Mirroring"" - visual isolation without new processes for perfectly focused automation. + +Running computer-use on the entire desktop often causes agent hallucinations and loss of focus when they see irrelevant windows and UI elements. App-Use solves this by creating composited views where agents only see what matters, dramatically improving task completion accuracy + +What you can build: Research agents working in Safari while writing agents draft in Notes, iPhone automation for messages and reminders, parallel testing across isolated app sessions, or teams of specialized agents working simultaneously without interference. + +Currently macOS-only (Quartz compositing engine). 
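The composited-view idea is easy to picture with a toy example: keep only whitelisted window regions of a desktop screenshot and blank the rest. This is purely illustrative, not the actual cua/App-Use implementation (which composites via Quartz), and the window geometry below is made up:

```python
# Toy illustration of visual isolation: black out everything except
# whitelisted app windows. NOT the actual cua/App-Use implementation,
# which composites via Quartz; the coordinates here are made up.
from PIL import Image

ALLOWED_WINDOWS = {
    "Safari": (0, 0, 1280, 800),   # (left, top, right, bottom)
    "Notes": (1280, 0, 1920, 800),
}

def scoped_view(desktop: Image.Image) -> Image.Image:
    """Return a copy of the desktop showing only the allowed windows."""
    view = Image.new("RGB", desktop.size, "black")
    for left, top, right, bottom in ALLOWED_WINDOWS.values():
        view.paste(desktop.crop((left, top, right, bottom)), (left, top))
    return view
```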
+ +Read the full guide: https://trycua.com/blog/app-use + +Github : https://github.com/trycua/cua","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66db0c02ba8010cc014ea962/OGU7PAvrFm7io-sj-Rr0N.mp4'}]",[],"[{'reaction': '🔥', 'users': ['dhruv3006', 'drwlf', 'LPX55'], 'count': 3}, {'reaction': '🚀', 'users': ['dhruv3006', 'John6666', 'niko91i'], 'count': 3}, {'reaction': '❤️', 'users': ['dhruv3006'], 'count': 1}]",2025-06-02 08:04:31,2025-06-02 08:04:31.977,[],/posts/dhruv3006/943709012268825,2544,"{'language': 'en', 'probability': 0.8628668189048767}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png,122.0,Asankhaya Sharma,codelion,495839598023200,"[{'type': 'text', 'value': '🧠 We just implemented Andrej Karpathy\'s ""third paradigm"" for LLM learning!', 'raw': '🧠 We just implemented Andrej Karpathy\'s ""third paradigm"" for LLM learning!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'System Prompt Learning (SPL) enables LLMs to automatically learn problem-solving strategies from experience, rather than relying on static prompts.', 'raw': 'System Prompt Learning (SPL) enables LLMs to automatically learn problem-solving strategies from experience, rather than relying on static prompts.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 How it works:', 'raw': '🚀 How it works:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Your LLM builds a database of effective strategies, selects the best ones for each problem, and refines them over time based on success rates.', 'raw': 'Your LLM builds a database of effective strategies, selects the best ones for each problem, and refines them over time based on success rates.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Results across math benchmarks:', 'raw': '📊 Results across math benchmarks:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Arena Hard: 29% → 37.6% (+8.6%)', 'raw': 'Arena Hard: 29% → 37.6% (+8.6%)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AIME24: 23.33% → 30% (+6.67%)', 'raw': 'AIME24: 23.33% → 30% (+6.67%)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'OptILLMBench: 61% → 65% (+4%)', 'raw': 'OptILLMBench: 61% → 65% (+4%)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The best part? All strategies are human-readable and the system gets progressively better at problem types you use frequently.', 'raw': 'The best part? 
All strategies are human-readable and the system gets progressively better at problem types you use frequently.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key benefits:', 'raw': '✨ Key benefits:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔄 Cumulative learning over time', 'raw': '🔄 Cumulative learning over time'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 Transparent, inspectable strategies ', 'raw': '📖 Transparent, inspectable strategies '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔌 Works with any OpenAI-compatible API', 'raw': '🔌 Works with any OpenAI-compatible API'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡ Simple integration: just add ""spl-"" prefix to your model', 'raw': '⚡ Simple integration: just add ""spl-"" prefix to your model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Built as an open-source plugin in optillm. After 500 queries, our system developed 129 strategies and refined 97 of them!', 'raw': 'Built as an open-source plugin in optillm. After 500 queries, our system developed 129 strategies and refined 97 of them!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This feels like a genuine step toward AI that learns from experience while staying completely interpretable.', 'raw': 'This feels like a genuine step toward AI that learns from experience while staying completely interpretable.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 GitHub: ', 'raw': '🔗 GitHub: '}, {'type': 'link', 'href': 'https://github.com/codelion/optillm/tree/main/optillm/plugins/spl', 'raw': 'https://github.com/codelion/optillm/tree/main/optillm/plugins/spl'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 Full article: ', 'raw': '📖 Full article: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/codelion/system-prompt-learning', 'raw': 'https://huggingface.co/blog/codelion/system-prompt-learning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐦 Original Karpathy tweet: ', 'raw': '🐦 Original Karpathy tweet: '}, {'type': 'link', 'href': 'https://x.com/karpathy/status/1921368644069765486', 'raw': 'https://x.com/karpathy/status/1921368644069765486'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Have you experimented with advanced system prompting? What strategies would you want your LLM to learn?', 'raw': 'Have you experimented with advanced system prompting? What strategies would you want your LLM to learn?'}]","🧠 We just implemented Andrej Karpathy's ""third paradigm"" for LLM learning! + +System Prompt Learning (SPL) enables LLMs to automatically learn problem-solving strategies from experience, rather than relying on static prompts. + +🚀 How it works: +Your LLM builds a database of effective strategies, selects the best ones for each problem, and refines them over time based on success rates. + +📊 Results across math benchmarks: +Arena Hard: 29% → 37.6% (+8.6%) +AIME24: 23.33% → 30% (+6.67%) +OptILLMBench: 61% → 65% (+4%) + +The best part? All strategies are human-readable and the system gets progressively better at problem types you use frequently. 
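Since it speaks the OpenAI-compatible API, trying SPL is a one-line change on the client side. A minimal sketch assuming a locally running optillm proxy (the base URL, API key handling, and underlying model name are assumptions; adjust to your setup):

```python
# Minimal sketch of the "spl-" prefix integration described above,
# assuming a local optillm proxy; base URL, API key handling, and the
# underlying model name are assumptions for illustration.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1", api_key="optillm")

resp = client.chat.completions.create(
    model="spl-gpt-4o-mini",  # the spl- prefix routes through the SPL plugin
    messages=[{"role": "user", "content": "If 3x + 7 = 22, what is x?"}],
)
print(resp.choices[0].message.content)
```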
+ +✨ Key benefits: +🔄 Cumulative learning over time +📖 Transparent, inspectable strategies +🔌 Works with any OpenAI-compatible API +⚡ Simple integration: just add ""spl-"" prefix to your model + +Built as an open-source plugin in optillm. After 500 queries, our system developed 129 strategies and refined 97 of them! + +This feels like a genuine step toward AI that learns from experience while staying completely interpretable. + +🔗 GitHub: https://github.com/codelion/optillm/tree/main/optillm/plugins/spl +📖 Full article: https://huggingface.co/blog/codelion/system-prompt-learning +🐦 Original Karpathy tweet: https://x.com/karpathy/status/1921368644069765486 + +Have you experimented with advanced system prompting? What strategies would you want your LLM to learn?",[],[],"[{'reaction': '🚀', 'users': ['codelion', 'John6666', 'theainerd', 'victor', 'nomadicsynth', 'codybanks1654'], 'count': 6}, {'reaction': '🔥', 'users': ['codelion', 'whitebill', 'happy8825', 'Spectral-Phoenix', 'codybanks1654'], 'count': 5}, {'reaction': '❤️', 'users': ['codelion', 'Rybens', 'Sam4rano', 'Dimitrk', 'codybanks1654'], 'count': 5}, {'reaction': '➕', 'users': ['codelion', 'codybanks1654'], 'count': 2}]",2025-06-02 07:19:50,2025-06-02 07:19:50.682,[],/posts/codelion/495839598023200,3409,"{'language': 'en', 'probability': 0.840640127658844}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,937623008756207,"[{'type': 'text', 'value': 'CausVid LoRA V2 of Wan 2.1 Brings Massive Quality Improvements, Better Colors and Saturation > ', 'raw': 'CausVid LoRA V2 of Wan 2.1 Brings Massive Quality Improvements, Better Colors and Saturation > '}, {'type': 'link', 'href': 'https://youtu.be/1rAwZv0hEcU', 'raw': 'https://youtu.be/1rAwZv0hEcU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tutorial video : ', 'raw': 'Tutorial video : '}, {'type': 'link', 'href': 'https://youtu.be/1rAwZv0hEcU', 'raw': 'https://youtu.be/1rAwZv0hEcU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'CausVid LoRA V2 of Wan 2.1 is just amazing. In this tutorial video I will show you how to use the most powerful video generation model Wan 2.1 with CausVid LoRA effortlessly. Normally, Wan 2.1 requires 50 steps to get excellent results. With CausVid LoRA we get such excellent results only in 8 steps. Moreover, with newest version 2, now the quality is almost identical to base Wan 2.1. I will show how to download and use in SwarmUI with 1-click to download and apply presets. We will also leverage ComfyUI and fastest attention (Sage Attention).', 'raw': 'CausVid LoRA V2 of Wan 2.1 is just amazing. In this tutorial video I will show you how to use the most powerful video generation model Wan 2.1 with CausVid LoRA effortlessly. Normally, Wan 2.1 requires 50 steps to get excellent results. With CausVid LoRA we get such excellent results only in 8 steps. Moreover, with newest version 2, now the quality is almost identical to base Wan 2.1. I will show how to download and use in SwarmUI with 1-click to download and apply presets. 
We will also leverage ComfyUI and fastest attention (Sage Attention).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗Follow below link to download the zip file that contains SwarmUI installer and AI models downloader Gradio App - the one used in the tutorial ⤵️', 'raw': '🔗Follow below link to download the zip file that contains SwarmUI installer and AI models downloader Gradio App - the one used in the tutorial ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/SwarmUI-Installer-AI-Videos-Downloader-114517862', 'raw': 'https://www.patreon.com/posts/SwarmUI-Installer-AI-Videos-Downloader-114517862'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ CausVid Main Tutorial : ', 'raw': '▶️ CausVid Main Tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/fTzlQ0tjxj0', 'raw': 'https://youtu.be/fTzlQ0tjxj0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ How to install SwarmUI main tutorial : ', 'raw': '▶️ How to install SwarmUI main tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/fTzlQ0tjxj0', 'raw': 'https://youtu.be/fTzlQ0tjxj0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗Follow below link to download the zip file that contains ComfyUI 1-click installer that has all the Flash Attention, Sage Attention, xFormers, Triton, DeepSpeed, RTX 5000 series support ⤵️', 'raw': '🔗Follow below link to download the zip file that contains ComfyUI 1-click installer that has all the Flash Attention, Sage Attention, xFormers, Triton, DeepSpeed, RTX 5000 series support ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/Advanced-ComfyUI-1-Click-Installer-105023709', 'raw': 'https://www.patreon.com/posts/Advanced-ComfyUI-1-Click-Installer-105023709'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Python, Git, CUDA, C++, FFMPEG, MSVC installation tutorial - needed for ComfyUI ⤵️', 'raw': '🔗 Python, Git, CUDA, C++, FFMPEG, MSVC installation tutorial - needed for ComfyUI ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://youtu.be/DrhUHnYfwC0', 'raw': 'https://youtu.be/DrhUHnYfwC0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 SECourses Official Discord 10500+ Members ⤵️', 'raw': '🔗 SECourses Official Discord 10500+ Members ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388', 'raw': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️', 'raw': '🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://github.com/FurkanGozukara/Stable-Diffusion', 'raw': 'https://github.com/FurkanGozukara/Stable-Diffusion'}, {'type': 'new_line', 'raw': '\n'}, 
{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 SECourses Official Reddit - Stay Subscribed To Learn All The News and More ⤵️', 'raw': '🔗 SECourses Official Reddit - Stay Subscribed To Learn All The News and More ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.reddit.com/r/SECourses/', 'raw': 'https://www.reddit.com/r/SECourses/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","CausVid LoRA V2 of Wan 2.1 Brings Massive Quality Improvements, Better Colors and Saturation > https://youtu.be/1rAwZv0hEcU + +Tutorial video : https://youtu.be/1rAwZv0hEcU + +CausVid LoRA V2 of Wan 2.1 is just amazing. In this tutorial video I will show you how to use the most powerful video generation model Wan 2.1 with CausVid LoRA effortlessly. Normally, Wan 2.1 requires 50 steps to get excellent results. With CausVid LoRA we get such excellent results only in 8 steps. Moreover, with newest version 2, now the quality is almost identical to base Wan 2.1. I will show how to download and use in SwarmUI with 1-click to download and apply presets. We will also leverage ComfyUI and fastest attention (Sage Attention). + +🔗Follow below link to download the zip file that contains SwarmUI installer and AI models downloader Gradio App - the one used in the tutorial ⤵️ +▶️ https://www.patreon.com/posts/SwarmUI-Installer-AI-Videos-Downloader-114517862 + +▶️ CausVid Main Tutorial : https://youtu.be/fTzlQ0tjxj0 + +▶️ How to install SwarmUI main tutorial : https://youtu.be/fTzlQ0tjxj0 + +🔗Follow below link to download the zip file that contains ComfyUI 1-click installer that has all the Flash Attention, Sage Attention, xFormers, Triton, DeepSpeed, RTX 5000 series support ⤵️ +▶️ https://www.patreon.com/posts/Advanced-ComfyUI-1-Click-Installer-105023709 + +🔗 Python, Git, CUDA, C++, FFMPEG, MSVC installation tutorial - needed for ComfyUI ⤵️ +▶️ https://youtu.be/DrhUHnYfwC0 + +🔗 SECourses Official Discord 10500+ Members ⤵️ +▶️ https://discord.com/servers/software-engineering-courses-secourses-772774097734074388 + +🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️ +▶️ https://github.com/FurkanGozukara/Stable-Diffusion + +🔗 SECourses Official Reddit - Stay Subscribed To Learn All The News and More ⤵️ +▶️ https://www.reddit.com/r/SECourses/ + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/sZrT2FaieO98uMHFAPVmS.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/guYkM_fHK3-8gahcNFb5M.mp4'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'stellaray777', 'ykarout'], 'count': 3}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666', 'stellaray777'], 'count': 3}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'stellaray777'], 'count': 2}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'stellaray777'], 'count': 2}, {'reaction': '🤗', 'users': ['MonsterMMORPG', 'stellaray777'], 'count': 2}, {'reaction': '😎', 'users': ['MonsterMMORPG', 'stellaray777'], 'count': 2}, {'reaction': '➕', 'users': ['MonsterMMORPG', 'stellaray777'], 'count': 2}, {'reaction': '🧠', 'users': ['MonsterMMORPG', 'stellaray777'], 'count': 2}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-06-01 23:48:33,2025-06-01 
23:48:33.402,[],/posts/MonsterMMORPG/937623008756207,2855,"{'language': 'en', 'probability': 0.7510440945625305}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64a2cd7342b2a76a308b3daf/o6SV0ilIA1sov088MaN9j.jpeg,16.0,Maximus Powers,maximuspowers,979692979265259,"[{'type': 'text', 'value': '♔ Chess players:', 'raw': '♔ Chess players:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I made an app that encodes chess moves and initiative as musical notes. I'd love to hear ideas from more advanced players about how I could improve this."", 'raw': ""I made an app that encodes chess moves and initiative as musical notes. I'd love to hear ideas from more advanced players about how I could improve this.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My vision for this project was to improve pattern recognition of openings, tactics, and positions through artificial synesthesia. ', 'raw': 'My vision for this project was to improve pattern recognition of openings, tactics, and positions through artificial synesthesia. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Please try it out and let me know if you have any ideas for it:', 'raw': 'Please try it out and let me know if you have any ideas for it:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'maximuspowers/musical-chess'}, 'url': 'https://huggingface.co/spaces/maximuspowers/musical-chess', 'raw': 'https://huggingface.co/spaces/maximuspowers/musical-chess'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks😁', 'raw': 'Thanks😁'}]","♔ Chess players: +I made an app that encodes chess moves and initiative as musical notes. I'd love to hear ideas from more advanced players about how I could improve this. + +My vision for this project was to improve pattern recognition of openings, tactics, and positions through artificial synesthesia. 
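One naive way to picture a move-to-note encoding (purely illustrative, not necessarily what this Space does): let the destination square's file pick a scale degree and its rank shift the pitch.

```python
# Illustrative move-to-pitch encoding; NOT necessarily what the Space
# does. The target square's file picks a C-major scale degree, its
# rank nudges the pitch upward, all relative to middle C (MIDI 60).
C_MAJOR = [0, 2, 4, 5, 7, 9, 11, 12]  # semitone offsets

def move_to_midi(uci_move: str, base: int = 60) -> int:
    """Map a UCI move like 'e2e4' to a MIDI note via its target square."""
    file_idx = ord(uci_move[2]) - ord("a")  # files a-h -> 0-7
    rank_idx = int(uci_move[3]) - 1         # ranks 1-8 -> 0-7
    return base + C_MAJOR[file_idx] + rank_idx

print([move_to_midi(m) for m in ["e2e4", "e7e5", "g1f3"]])  # [70, 71, 71]
```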
+ +Please try it out and let me know if you have any ideas for it: +https://huggingface.co/spaces/maximuspowers/musical-chess + +Thanks😁",[],[],"[{'reaction': '🤗', 'users': ['John6666', 'stellaray777', 'shainaraza'], 'count': 3}, {'reaction': '❤️', 'users': ['MayeulSGC', 'stellaray777', 'shainaraza'], 'count': 3}, {'reaction': '🔥', 'users': ['k-young', 'stellaray777', 'shainaraza'], 'count': 3}]",2025-06-01 22:32:18,2025-06-01 22:32:18.514,[],/posts/maximuspowers/979692979265259,1897,"{'language': 'en', 'probability': 0.9568459987640381}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,258710418808496,"[{'type': 'text', 'value': 'New GUI model by Salesforce AI & Uni HK: Jedi ', 'raw': 'New GUI model by Salesforce AI & Uni HK: Jedi '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'tianbaoxiexxx/Jedi'}, 'url': 'https://huggingface.co/spaces/tianbaoxiexxx/Jedi', 'raw': 'https://huggingface.co/spaces/tianbaoxiexxx/Jedi'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'xlangai/Jedi-7B-1080p'}, 'url': 'https://huggingface.co/xlangai/Jedi-7B-1080p', 'raw': 'https://huggingface.co/xlangai/Jedi-7B-1080p'}, {'type': 'text', 'value': ' 🤗', 'raw': ' 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Based on Qwen2.5-VL with Apache 2.0 license', 'raw': 'Based on Qwen2.5-VL with Apache 2.0 license'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'prompt with below screenshot → select ""find more""', 'raw': 'prompt with below screenshot → select ""find more""'}, {'type': 'new_line', 'raw': '\n'}]","New GUI model by Salesforce AI & Uni HK: Jedi +https://huggingface.co/spaces/tianbaoxiexxx/Jedi https://huggingface.co/xlangai/Jedi-7B-1080p 🤗 +Based on Qwen2.5-VL with Apache 2.0 license + +prompt with below screenshot → select ""find more"" +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/eXhDhhdEy2cDy_AgNBUg2.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'programmnix-askui', 'AtAndDev'], 'count': 3}]",2025-06-01 13:21:20,2025-06-09 13:56:15.047,"[{'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786, 'isFollowing': False}]",/posts/merve/258710418808496,1152,"{'language': 'en', 'probability': 0.6869575381278992}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,204958200717570,"[{'type': 'text', 'value': '13 Awesome MCP Servers', 'raw': '13 Awesome MCP Servers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MCP changed how agents connect with tools. ', 'raw': 'MCP changed how agents connect with tools. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After writing the most read explanation of MCP on Hugging Face (', 'raw': 'After writing the most read explanation of MCP on Hugging Face ('}, {'type': 'link', 'href': 'https://huggingface.co/blog/Kseniase/mcp', 'raw': 'https://huggingface.co/blog/Kseniase/mcp'}, {'type': 'text', 'value': '), we chose these 13 awesome MCP servers that you can work with:', 'raw': '), we chose these 13 awesome MCP servers that you can work with:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Agentset MCP -> ', 'raw': '1. Agentset MCP -> '}, {'type': 'link', 'href': 'https://github.com/agentset-ai/mcp-server', 'raw': 'https://github.com/agentset-ai/mcp-server'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For efficient and quick building of intelligent, doc-based apps using open-source Agentset platform for RAG', 'raw': 'For efficient and quick building of intelligent, doc-based apps using open-source Agentset platform for RAG'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. GitHub MCP Server -> ', 'raw': '2. GitHub MCP Server -> '}, {'type': 'link', 'href': 'https://github.com/github/github-mcp-server', 'raw': 'https://github.com/github/github-mcp-server'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Integrates GitHub APIs into your workflow, allowing you to build AI tools and apps that interact with GitHub's ecosystem"", 'raw': ""Integrates GitHub APIs into your workflow, allowing you to build AI tools and apps that interact with GitHub's ecosystem""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. arXiv MCP -> ', 'raw': '3. arXiv MCP -> '}, {'type': 'link', 'href': 'https://github.com/andybrandt/mcp-simple-arxiv', 'raw': 'https://github.com/andybrandt/mcp-simple-arxiv'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Allows working with research papers on arXiv through effective search and access to their metadata, abstracts, and links ', 'raw': 'Allows working with research papers on arXiv through effective search and access to their metadata, abstracts, and links '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. MCP Run Python -> ', 'raw': '4. MCP Run Python -> '}, {'type': 'link', 'href': 'https://github.com/pydantic/pydantic-ai/tree/main/mcp-run-python', 'raw': 'https://github.com/pydantic/pydantic-ai/tree/main/mcp-run-python'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enables running Python code in a sandbox via Pyodide in Deno, so it can be isolated from the rest of the operating system ', 'raw': 'Enables running Python code in a sandbox via Pyodide in Deno, so it can be isolated from the rest of the operating system '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. 
Safe Local Python Executor -> '}, {'type': 'link', 'href': 'https://github.com/maxim-saplin/mcp_safe_local_python_executor', 'raw': 'https://github.com/maxim-saplin/mcp_safe_local_python_executor'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A lightweight tool for running LLM-generated Python code locally, using Hugging Face’s LocalPythonExecutor (from smolagents framework) and exposing it via MCP for AI assistant integration ', 'raw': 'A lightweight tool for running LLM-generated Python code locally, using Hugging Face’s LocalPythonExecutor (from smolagents framework) and exposing it via MCP for AI assistant integration '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6. Cursor MCP Installer -> ', 'raw': '6. Cursor MCP Installer -> '}, {'type': 'link', 'href': 'https://github.com/matthewdcage/cursor-mcp-installer', 'raw': 'https://github.com/matthewdcage/cursor-mcp-installer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Allows to automatically add MCP servers to Cursor for development convenience', 'raw': 'Allows to automatically add MCP servers to Cursor for development convenience'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7. Basic Memory -> ', 'raw': '7. Basic Memory -> '}, {'type': 'link', 'href': 'https://memory.basicmachines.co/docs/introduction', 'raw': 'https://memory.basicmachines.co/docs/introduction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This knowledge management system connects to LLMs and lets you build a persistent semantic graph from AI conversations with AI agents', 'raw': 'This knowledge management system connects to LLMs and lets you build a persistent semantic graph from AI conversations with AI agents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read further in the comments 👇', 'raw': 'Read further in the comments 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you like it, also subscribe to the Turing Post: ', 'raw': 'If you like it, also subscribe to the Turing Post: '}, {'type': 'link', 'href': 'https://www.turingpost.com/subscribe', 'raw': 'https://www.turingpost.com/subscribe'}]","13 Awesome MCP Servers + +MCP changed how agents connect with tools. + +After writing the most read explanation of MCP on Hugging Face (https://huggingface.co/blog/Kseniase/mcp), we chose these 13 awesome MCP servers that you can work with: + +1. Agentset MCP -> https://github.com/agentset-ai/mcp-server +For efficient and quick building of intelligent, doc-based apps using open-source Agentset platform for RAG + +2. GitHub MCP Server -> https://github.com/github/github-mcp-server +Integrates GitHub APIs into your workflow, allowing you to build AI tools and apps that interact with GitHub's ecosystem + +3. arXiv MCP -> https://github.com/andybrandt/mcp-simple-arxiv +Allows working with research papers on arXiv through effective search and access to their metadata, abstracts, and links + +4. MCP Run Python -> https://github.com/pydantic/pydantic-ai/tree/main/mcp-run-python +Enables running Python code in a sandbox via Pyodide in Deno, so it can be isolated from the rest of the operating system + +5. 
Safe Local Python Executor -> https://github.com/maxim-saplin/mcp_safe_local_python_executor +A lightweight tool for running LLM-generated Python code locally, using Hugging Face’s LocalPythonExecutor (from smolagents framework) and exposing it via MCP for AI assistant integration + +6. Cursor MCP Installer -> https://github.com/matthewdcage/cursor-mcp-installer +Allows to automatically add MCP servers to Cursor for development convenience + +7. Basic Memory -> https://memory.basicmachines.co/docs/introduction +This knowledge management system connects to LLMs and lets you build a persistent semantic graph from AI conversations with AI agents + +Read further in the comments 👇 + +If you like it, also subscribe to the Turing Post: https://www.turingpost.com/subscribe",[],[],"[{'reaction': '🚀', 'users': ['John6666', 'victor', 'r4yd3n', 'elon-fask'], 'count': 4}, {'reaction': '👀', 'users': ['cgus'], 'count': 1}]",2025-06-01 11:00:47,2025-06-01 13:34:10.926,"[{'_id': '64838b28c235ef76b63e4999', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg', 'fullname': 'Ksenia Se', 'name': 'Kseniase', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 971, 'isFollowing': False}]",/posts/Kseniase/204958200717570,2047,"{'language': 'en', 'probability': 0.7822470664978027}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,879526170068033,"[{'type': 'text', 'value': '🎨 FLUX VIDEO Generation - All-in-One AI Image/Video/Audio Generator', 'raw': '🎨 FLUX VIDEO Generation - All-in-One AI Image/Video/Audio Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Introduction', 'raw': '🚀 Introduction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'FLUX VIDEO Generation is an all-in-one AI creative tool that generates images, videos, and audio from text prompts, powered by NVIDIA H100 GPU for lightning-fast processing!', 'raw': 'FLUX VIDEO Generation is an all-in-one AI creative tool that generates images, videos, and audio from text prompts, powered by NVIDIA H100 GPU for lightning-fast processing!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/Flux-VIDEO'}, 'url': 'https://huggingface.co/spaces/ginigen/Flux-VIDEO', 'raw': 'https://huggingface.co/spaces/ginigen/Flux-VIDEO'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key Features', 'raw': '✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ Text → Image → Video 🖼️➡️🎬', 'raw': '1️⃣ Text → Image → Video 🖼️➡️🎬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Generate high-quality images from Korean/English prompts', 'raw': 'Generate high-quality images from Korean/English prompts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Transform still images into natural motion videos', 'raw': 'Transform still images into natural motion videos'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Multiple size presets (Instagram, YouTube, Facebook, etc.)', 'raw': 'Multiple size presets (Instagram, YouTube, Facebook, etc.)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: 1-4 seconds / Full 
version: up to 60 seconds', 'raw': 'Demo: 1-4 seconds / Full version: up to 60 seconds'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Image Aspect Ratio Change 🎭', 'raw': '2️⃣ Image Aspect Ratio Change 🎭'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Freely adjust image aspect ratios', 'raw': 'Freely adjust image aspect ratios'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Expand images with outpainting technology', 'raw': 'Expand images with outpainting technology'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5 alignment options (Center, Left, Right, Top, Bottom)', 'raw': '5 alignment options (Center, Left, Right, Top, Bottom)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Real-time preview functionality', 'raw': 'Real-time preview functionality'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Video + Audio Generation 🎵', 'raw': '3️⃣ Video + Audio Generation 🎵'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Add AI-generated audio to videos', 'raw': 'Add AI-generated audio to videos'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Korean prompt support (auto-translation)', 'raw': 'Korean prompt support (auto-translation)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Context-aware sound generation', 'raw': 'Context-aware sound generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Powered by MMAudio technology', 'raw': 'Powered by MMAudio technology'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ Tech Stack', 'raw': '🛠️ Tech Stack'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Image Generation: FLUX, Stable Diffusion XL', 'raw': 'Image Generation: FLUX, Stable Diffusion XL'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Video Generation: TeaCache optimization', 'raw': 'Video Generation: TeaCache optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Audio Generation: MMAudio (44kHz high-quality)', 'raw': 'Audio Generation: MMAudio (44kHz high-quality)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Outpainting: ControlNet Union', 'raw': 'Outpainting: ControlNet Union'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Infrastructure: NVIDIA H100 GPU for ultra-fast generation', 'raw': 'Infrastructure: NVIDIA H100 GPU for ultra-fast generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 How to Use', 'raw': '💡 How to Use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Select your desired tab', 'raw': 'Select your desired tab'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enter your prompt (Korean/English supported!)', 'raw': 'Enter your prompt (Korean/English supported!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Adjust settings', 'raw': 'Adjust settings'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Click generate button', 'raw': 'Click generate button'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Use Cases', 'raw': '🎯 Use Cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📱 Social media content creation', 'raw': '📱 Social media content creation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎥 YouTube Shorts/Reels', 'raw': '🎥 YouTube Shorts/Reels'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Presentation materials', 'raw': '📊 Presentation materials'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Creative artwork', 'raw': '🎨 Creative artwork'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎵 Background sound generation', 'raw': '🎵 Background sound generation'}]","🎨 FLUX VIDEO Generation - All-in-One AI Image/Video/Audio Generator + +🚀 Introduction +FLUX VIDEO Generation is an all-in-one AI creative tool that generates images, videos, and audio from text prompts, powered by NVIDIA H100 GPU for lightning-fast processing! + +https://huggingface.co/spaces/ginigen/Flux-VIDEO + +✨ Key Features +1️⃣ Text → Image → Video 🖼️➡️🎬 + +Generate high-quality images from Korean/English prompts +Transform still images into natural motion videos +Multiple size presets (Instagram, YouTube, Facebook, etc.) +Demo: 1-4 seconds / Full version: up to 60 seconds + +2️⃣ Image Aspect Ratio Change 🎭 + +Freely adjust image aspect ratios +Expand images with outpainting technology +5 alignment options (Center, Left, Right, Top, Bottom) +Real-time preview functionality + +3️⃣ Video + Audio Generation 🎵 + +Add AI-generated audio to videos +Korean prompt support (auto-translation) +Context-aware sound generation +Powered by MMAudio technology + +🛠️ Tech Stack + +Image Generation: FLUX, Stable Diffusion XL +Video Generation: TeaCache optimization +Audio Generation: MMAudio (44kHz high-quality) +Outpainting: ControlNet Union +Infrastructure: NVIDIA H100 GPU for ultra-fast generation + +💡 How to Use + +Select your desired tab +Enter your prompt (Korean/English supported!) 
+Adjust settings +Click generate button + +🎯 Use Cases + +📱 Social media content creation +🎥 YouTube Shorts/Reels +📊 Presentation materials +🎨 Creative artwork +🎵 Background sound generation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/5QZNgjy4GpAhenXZrU6V5.webp'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/eteFR-mpFARXSv0qzpUBb.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/Y3l3wklWnHIqL1dHLgKZC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/JgefhcJQghOO7KBu-zjAU.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/oyWbj_GJyVYvQJKMNRehS.png'}]",[],"[{'reaction': '🔥', 'users': ['ginipick', 'immunobiotech', 'SatSha', 'aiqcamp', 'aiqtech', 'greywood53', 'KHREHANKASHMIRI', 'fantaxy', 'John6666', 'Renxer', 'keeperballon', 'jige17', 'SevenLem', 'openfree', 'victor'], 'count': 15}, {'reaction': '🚀', 'users': ['ginipick', 'aiqtech', 'viidfedenwad'], 'count': 3}, {'reaction': '🤗', 'users': ['ginipick', 'Chief-Inspector', 'sungin3'], 'count': 3}, {'reaction': '❤️', 'users': ['Renxer', 'jinjinye'], 'count': 2}]",2025-06-01 05:33:02,2025-06-01 09:38:41.191,"[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957, 'isFollowing': False}]",/posts/ginipick/879526170068033,4334,"{'language': 'en', 'probability': 0.6811960339546204}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/658a4c914bb41498f7d5e3ca/zMJjxfazi9ePc7GZ1jRAE.jpeg,66.0,Pro Creations,ProCreations,924022342209071,"[{'type': 'text', 'value': 'Introducing my custom AI lab!', 'raw': 'Introducing my custom AI lab!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This huggingface shows a bunch of fundamental visuals of AI’s training in real time. ', 'raw': 'This huggingface shows a bunch of fundamental visuals of AI’s training in real time. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ProCreations/ai-labs'}, 'url': 'https://huggingface.co/spaces/ProCreations/ai-labs', 'raw': 'https://huggingface.co/spaces/ProCreations/ai-labs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Updates coming soon with a walk through mode that teaches you what it all means!', 'raw': 'Updates coming soon with a walk through mode that teaches you what it all means!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Meanwhile if you want to learn about AI and not just see it,', 'raw': 'Meanwhile if you want to learn about AI and not just see it,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ProCreations/learn-ai'}, 'url': 'https://huggingface.co/spaces/ProCreations/learn-ai', 'raw': 'https://huggingface.co/spaces/ProCreations/learn-ai'}]","Introducing my custom AI lab! 
+

This Hugging Face Space shows a set of fundamental visualizations of AI training in real time. 

https://huggingface.co/spaces/ProCreations/ai-labs

Updates coming soon with a walk-through mode that teaches you what it all means!

Meanwhile, if you want to learn about AI and not just see it,
https://huggingface.co/spaces/ProCreations/learn-ai",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-06-01 03:05:05,2025-06-01 03:05:05.104,[],/posts/ProCreations/924022342209071,291,"{'language': 'en', 'probability': 0.8972842693328857}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/647f805de9c81260ff8881ee/WXfMTAlKuHAi4M1fySCJ8.jpeg,178.0,ℏεsam,hesamation,260011784391977,"[{'type': 'text', 'value': 'I really like how this seven-stage pipeline was laid out in the Ultimate Guide to Fine-Tuning book.', 'raw': 'I really like how this seven-stage pipeline was laid out in the Ultimate Guide to Fine-Tuning book.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It gives an overview, then goes into detail for each stage, even providing best practices. ', 'raw': 'It gives an overview, then goes into detail for each stage, even providing best practices. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It’s 115 pages on arxiv, definitely worth a read.', 'raw': 'It’s 115 pages on arxiv, definitely worth a read.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out: ', 'raw': 'Check it out: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2408.13296', 'raw': 'https://arxiv.org/abs/2408.13296'}]","I really like how this seven-stage pipeline was laid out in the Ultimate Guide to Fine-Tuning book.

It gives an overview, then goes into detail for each stage, even providing best practices. 

It’s 115 pages on arxiv, definitely worth a read.

Check it out: https://arxiv.org/abs/2408.13296","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f805de9c81260ff8881ee/fdq5wCNpEOOYOEZyLTRfI.png'}]",[],"[{'reaction': '🔥', 'users': ['hesamation', 'John6666', 'ariG23498', 'ialwayslikedgrime', 'Misha24-10', 'eramax', 'hackur'], 'count': 7}, {'reaction': '🧠', 'users': ['hackur'], 'count': 1}]",2025-05-27 16:50:11,2025-05-27 16:50:11.280,[],/posts/hesamation/260011784391977,2609,"{'language': 'en', 'probability': 0.896704375743866}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg,29.0,Sk md saad amin,Reality123b,144222035432726,"[{'type': 'text', 'value': 'does merging models count as creating a new model myself?', 'raw': 'does merging models count as creating a new model myself?'}, {'type': 'new_line', 'raw': '\n'}]","does merging models count as creating a new model myself? 
+",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-05-27 16:31:44,2025-05-27 16:31:44.640,[],/posts/Reality123b/144222035432726,228,"{'language': 'en', 'probability': 0.9858456254005432}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,601276092251484,"[{'type': 'text', 'value': 'emerging trend: models that can understand image + text and generate image + text ', 'raw': 'emerging trend: models that can understand image + text and generate image + text '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""don't miss out ⤵️"", 'raw': ""don't miss out ⤵️""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> MMaDA: single 8B diffusion model aligned with CoT (reasoning!) + UniGRPO ', 'raw': '> MMaDA: single 8B diffusion model aligned with CoT (reasoning!) + UniGRPO '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Gen-Verse/MMaDA'}, 'url': 'https://huggingface.co/spaces/Gen-Verse/MMaDA', 'raw': 'https://huggingface.co/spaces/Gen-Verse/MMaDA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> BAGEL: 7B MoT model based on Qwen2.5, SigLIP-so-400M, Flux VAE ', 'raw': '> BAGEL: 7B MoT model based on Qwen2.5, SigLIP-so-400M, Flux VAE '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ByteDance-Seed/BAGEL'}, 'url': 'https://huggingface.co/spaces/ByteDance-Seed/BAGEL', 'raw': 'https://huggingface.co/spaces/ByteDance-Seed/BAGEL'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'both by ByteDance! 😱', 'raw': 'both by ByteDance! 😱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I keep track of all any input → any output models here ', 'raw': 'I keep track of all any input → any output models here '}, {'type': 'link', 'href': 'https://huggingface.co/collections/merve/any-to-any-models-6822042ee8eb7fb5e38f9b62', 'raw': 'https://huggingface.co/collections/merve/any-to-any-models-6822042ee8eb7fb5e38f9b62'}]","emerging trend: models that can understand image + text and generate image + text + +don't miss out ⤵️ +> MMaDA: single 8B diffusion model aligned with CoT (reasoning!) + UniGRPO https://huggingface.co/spaces/Gen-Verse/MMaDA +> BAGEL: 7B MoT model based on Qwen2.5, SigLIP-so-400M, Flux VAE https://huggingface.co/spaces/ByteDance-Seed/BAGEL +both by ByteDance! 
😱 + +I keep track of all any input → any output models here https://huggingface.co/collections/merve/any-to-any-models-6822042ee8eb7fb5e38f9b62","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/OCMImxdAx8VHJHi-2pdpJ.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'pcuenq', 'AtAndDev', 'Yukkkop'], 'count': 4}, {'reaction': '❤️', 'users': ['DonkeySmall'], 'count': 1}]",2025-05-27 14:54:34,2025-05-27 17:12:00.589,"[{'_id': '675a5d3a049d7f55bcebe362', 'avatarUrl': '/avatars/7997d995fd382f1479adf75355491b59.svg', 'fullname': 'Projecta', 'name': 'ProjectAdotai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/merve/601276092251484,2561,"{'language': 'en', 'probability': 0.7673007845878601}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,668826641388828,"[{'type': 'text', 'value': 'HunyuanPortrait 🔥 video model by Tencent Hunyuan team.', 'raw': 'HunyuanPortrait 🔥 video model by Tencent Hunyuan team.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2503.18860'}, 'url': 'https://huggingface.co/papers/2503.18860', 'raw': 'https://huggingface.co/papers/2503.18860', 'label': 'HunyuanPortrait: Implicit Condition Control for Enhanced Portrait\n Animation (2503.18860)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'tencent/HunyuanPortrait'}, 'url': 'https://huggingface.co/tencent/HunyuanPortrait', 'raw': 'https://huggingface.co/tencent/HunyuanPortrait'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Portrait animation from just one image + a video prompt', 'raw': '✨Portrait animation from just one image + a video prompt'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Diffusion-based, implicit motion control', 'raw': '✨Diffusion-based, implicit motion control'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Superior temporal consistency & detail', 'raw': '✨Superior temporal consistency & detail'}, {'type': 'new_line', 'raw': '\n'}]","HunyuanPortrait 🔥 video model by Tencent Hunyuan team. 
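For the HunyuanPortrait release above, a minimal sketch of pulling the checkpoint from the Hub with `huggingface_hub`. The repo id comes from the post; the local directory name is an arbitrary choice:

```python
# Fetch the HunyuanPortrait weights announced above (repo id from the post).
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="tencent/HunyuanPortrait",
    local_dir="checkpoints/hunyuan-portrait",  # arbitrary local path
)
print(f"Checkpoint files are in: {local_dir}")
```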
+ +https://huggingface.co/papers/2503.18860 +https://huggingface.co/tencent/HunyuanPortrait + +✨Portrait animation from just one image + a video prompt +✨Diffusion-based, implicit motion control +✨Superior temporal consistency & detail +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/UuW3jl0n6Nvorqxt7nOXr.mp4'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'DonkeySmall', 'Sultan-1215', 'linoyts', 'JoPmt', 'alibave491'], 'count': 6}]",2025-05-27 14:20:08,2025-05-27 14:20:08.908,[],/posts/AdinaY/668826641388828,1937,"{'language': 'en', 'probability': 0.7281726598739624}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/bl8SolAqKeN_Q6u2tmzNn.png,,Stephen Campbell,sencael,651336833577575,"[{'type': 'text', 'value': 'Beyond Interpretability: A Participatory Path to AI Alignment', 'raw': 'Beyond Interpretability: A Participatory Path to AI Alignment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://stephenrcampbell.substack.com/p/beyond-interpretability-a-participatory', 'raw': 'https://stephenrcampbell.substack.com/p/beyond-interpretability-a-participatory'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""A response to Dario Amodei's "", 'raw': ""A response to Dario Amodei's ""}, {'type': 'link', 'href': 'https://www.darioamodei.com/post/the-urgency-of-interpretability', 'raw': 'https://www.darioamodei.com/post/the-urgency-of-interpretability'}]","Beyond Interpretability: A Participatory Path to AI Alignment +https://stephenrcampbell.substack.com/p/beyond-interpretability-a-participatory +A response to Dario Amodei's https://www.darioamodei.com/post/the-urgency-of-interpretability",[],[],[],2025-05-27 14:12:34,2025-05-27 14:12:34.184,[],/posts/sencael/651336833577575,176,"{'language': 'en', 'probability': 0.7916216850280762}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d50e9ef9cbfa798c590004/FlVe8chafigMfrPpMeJRL.jpeg,133.0,Jared Sulzdorf,jsulz,851142617150819,"[{'type': 'text', 'value': 'With major model families like ', 'raw': 'With major model families like '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'Qwen'}, 'url': 'https://huggingface.co/Qwen', 'raw': 'https://huggingface.co/Qwen', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/620760a26e3b7210c2ff1943/-s1gyJfvbE1RgO5iBeNOi.png'}, {'type': 'text', 'value': ' and all of Llama from ', 'raw': ' and all of Llama from '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'meta-llama'}, 'url': 'https://huggingface.co/meta-llama', 'raw': 'https://huggingface.co/meta-llama', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646cf8084eefb026fb8fd8bc/oCTqufkdTkjyGodsx1vo1.png'}, {'type': 'text', 'value': ' on Xet, the time is right for new users and organizations to say goodbye to LFS on the Hub. ', 'raw': ' on Xet, the time is right for new users and organizations to say goodbye to LFS on the Hub. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Xet is now the default storage for new AI builders 🚀 🚀 🚀 ', 'raw': 'Xet is now the default storage for new AI builders 🚀 🚀 🚀 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Just sign up for an account, create a new model or dataset, pip install huggingface_hub and you're off to the races! 
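A minimal sketch of that "pip install huggingface_hub" workflow for the Xet announcement above, assuming you have already authenticated with `huggingface-cli login`; the repo and file names are placeholders:

```python
from huggingface_hub import HfApi

api = HfApi()
# Create a new model repo; per the announcement, new repos on Xet-enabled
# accounts are Xet-backed by default.
repo_id = api.create_repo("my-first-xet-model", repo_type="model").repo_id

# Upload a local file to the new repo.
api.upload_file(
    path_or_fileobj="model.safetensors",
    path_in_repo="model.safetensors",
    repo_id=repo_id,
)
```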
"", 'raw': ""Just sign up for an account, create a new model or dataset, pip install huggingface_hub and you're off to the races! ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read more here ', 'raw': 'Read more here '}, {'type': 'link', 'href': 'https://huggingface.co/changelog/xet-default-for-new-users', 'raw': 'https://huggingface.co/changelog/xet-default-for-new-users'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And for everyone with existing repositories, just sign up here ', 'raw': 'And for everyone with existing repositories, just sign up here '}, {'type': 'link', 'href': 'https://huggingface.co/join/xet', 'raw': 'https://huggingface.co/join/xet'}, {'type': 'text', 'value': "" - we'll migrate all existing repositories to Xet and all new repos you create will be Xet-backed by default. "", 'raw': "" - we'll migrate all existing repositories to Xet and all new repos you create will be Xet-backed by default. ""}]","With major model families like https://huggingface.co/Qwen and all of Llama from https://huggingface.co/meta-llama on Xet, the time is right for new users and organizations to say goodbye to LFS on the Hub. + +Xet is now the default storage for new AI builders 🚀 🚀 🚀 + +Just sign up for an account, create a new model or dataset, pip install huggingface_hub and you're off to the races! + +Read more here https://huggingface.co/changelog/xet-default-for-new-users + +And for everyone with existing repositories, just sign up here https://huggingface.co/join/xet - we'll migrate all existing repositories to Xet and all new repos you create will be Xet-backed by default. ",[],[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-05-27 13:50:35,2025-05-27 13:50:35.240,[],/posts/jsulz/851142617150819,584,"{'language': 'en', 'probability': 0.8039035201072693}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1606406298765-noauth.jpeg,368.0,Albert Villanova del Moral,albertvillanova,389895341871892,"[{'type': 'text', 'value': 'New in smolagents v1.17.0:', 'raw': 'New in smolagents v1.17.0:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Structured generation in CodeAgent 🧱', 'raw': '- Structured generation in CodeAgent 🧱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Streamable HTTP MCP support 🌐', 'raw': '- Streamable HTTP MCP support 🌐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Agent.run() returns rich RunResult 📦', 'raw': '- Agent.run() returns rich RunResult 📦'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Smarter agents, smoother workflows.', 'raw': 'Smarter agents, smoother workflows.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it now: ', 'raw': 'Try it now: '}, {'type': 'link', 'href': 'https://github.com/huggingface/smolagents/releases/tag/v1.17.0', 'raw': 'https://github.com/huggingface/smolagents/releases/tag/v1.17.0'}]","New in smolagents v1.17.0: +- Structured generation in CodeAgent 🧱 +- Streamable HTTP MCP support 🌐 +- Agent.run() returns rich RunResult 📦 + +Smarter agents, smoother workflows. 
+Try it now: https://github.com/huggingface/smolagents/releases/tag/v1.17.0",[],[],"[{'reaction': '🤗', 'users': ['John6666'], 'count': 1}, {'reaction': '😎', 'users': ['John6666'], 'count': 1}]",2025-05-27 12:07:19,2025-05-27 12:07:19.352,[],/posts/albertvillanova/389895341871892,627,"{'language': 'en', 'probability': 0.6194655895233154}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,719212082746895,"[{'type': 'text', 'value': 'Just completed the AI Agents course and wow, that capstone project really makes you understand how to build agents that can handle real-world complexity!', 'raw': 'Just completed the AI Agents course and wow, that capstone project really makes you understand how to build agents that can handle real-world complexity!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The final project uses the GAIA dataset - your agent has to solve tasks like analyzing Excel files, processing audio recordings, answering questions about YouTube videos, and diving into research papers. This isn't toy examples, it's the messy, multimodal stuff agents need to handle in practice."", 'raw': ""The final project uses the GAIA dataset - your agent has to solve tasks like analyzing Excel files, processing audio recordings, answering questions about YouTube videos, and diving into research papers. This isn't toy examples, it's the messy, multimodal stuff agents need to handle in practice.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Whether you’re just getting started with agents or want to go deeper with tools like LangChain, LlamaIndex, and SmolAgents, this course has tons of useful stuff. A few key insights:', 'raw': 'Whether you’re just getting started with agents or want to go deeper with tools like LangChain, LlamaIndex, and SmolAgents, this course has tons of useful stuff. A few key insights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Code agents are incredibly versatile once you get the architecture right', 'raw': '- Code agents are incredibly versatile once you get the architecture right'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The sweet spot is finding the right balance of guidance vs autonomy for each use case', 'raw': '- The sweet spot is finding the right balance of guidance vs autonomy for each use case'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Once the logic clicks, the possibilities really are endless - it's like letting LLMs break free from the chatbox"", 'raw': ""- Once the logic clicks, the possibilities really are endless - it's like letting LLMs break free from the chatbox""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The course is free and the certification deadline is July 1st, 2025. ', 'raw': 'The course is free and the certification deadline is July 1st, 2025. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The Hugging Face team built something special here. If you're tired of AI that impresses in demos but fails in practice, this is your path to building agents that actually deliver. "", 'raw': ""The Hugging Face team built something special here. If you're tired of AI that impresses in demos but fails in practice, this is your path to building agents that actually deliver. 
""}, {'type': 'link', 'href': 'https://huggingface.co/learn/agents-course/unit0/introduction', 'raw': 'https://huggingface.co/learn/agents-course/unit0/introduction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Best part? There's the MCP course next!"", 'raw': ""Best part? There's the MCP course next!""}]","Just completed the AI Agents course and wow, that capstone project really makes you understand how to build agents that can handle real-world complexity! + +The final project uses the GAIA dataset - your agent has to solve tasks like analyzing Excel files, processing audio recordings, answering questions about YouTube videos, and diving into research papers. This isn't toy examples, it's the messy, multimodal stuff agents need to handle in practice. + +Whether you’re just getting started with agents or want to go deeper with tools like LangChain, LlamaIndex, and SmolAgents, this course has tons of useful stuff. A few key insights: +- Code agents are incredibly versatile once you get the architecture right +- The sweet spot is finding the right balance of guidance vs autonomy for each use case +- Once the logic clicks, the possibilities really are endless - it's like letting LLMs break free from the chatbox + +The course is free and the certification deadline is July 1st, 2025. + +The Hugging Face team built something special here. If you're tired of AI that impresses in demos but fails in practice, this is your path to building agents that actually deliver. https://huggingface.co/learn/agents-course/unit0/introduction + +Best part? There's the MCP course next!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/BCncSXKfCl6gDJoI8i8-k.webp'}]",[],"[{'reaction': '❤️', 'users': ['WJ88', 'MohammedEltoum', 'RashMont', 'yusupwinata', 'John6666', 'DrSamir', 'KingNish', 'VicVic0524', 'pradiptadeb90', 'Harjinder27', 'AdinaY', 'KneeSama', 'victor', 'Moibe', 'LeoBorai', 'mause123', 'Chroma111', 'kamorou', 'chezhian'], 'count': 19}, {'reaction': '😎', 'users': ['John6666', 'mause123', 'chezhian'], 'count': 3}, {'reaction': '👀', 'users': ['VicVic0524', 'mause123', 'chezhian'], 'count': 3}, {'reaction': '➕', 'users': ['mause123', 'chezhian'], 'count': 2}, {'reaction': '🔥', 'users': ['mause123', 'chezhian'], 'count': 2}, {'reaction': '🤗', 'users': ['Chroma111', 'chezhian'], 'count': 2}]",2025-05-26 19:56:23,2025-05-26 19:56:23.534,[],/posts/fdaudens/719212082746895,3876,"{'language': 'en', 'probability': 0.9356042146682739}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/658a4c914bb41498f7d5e3ca/zMJjxfazi9ePc7GZ1jRAE.jpeg,66.0,Pro Creations,ProCreations,321100188234240,"[{'type': 'text', 'value': 'Eyyyy 50 followers 🤯', 'raw': 'Eyyyy 50 followers 🤯'}]",Eyyyy 50 followers 🤯,[],[],"[{'reaction': '🚀', 'users': ['John6666', 'VicVic0524', 'Clausss', 'sbrandeis', 'ZennyKenny', 'DavidGF', 'Ohadnr-boards', 'victor', 'AtAndDev', 'clem', 'HazDav', 'RobAgrees', 'nyuuzyou'], 'count': 13}]",2025-05-26 17:50:13,2025-05-27 09:53:38.875,"[{'_id': '6826452c8caf98415c35a51e', 'avatarUrl': '/avatars/8539065f7ead4ec64a57a50c8a6f77c9.svg', 'fullname': 'ledraa', 'name': 'theledraa', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/ProCreations/321100188234240,2911,"{'language': 'sq', 'probability': 0.20455364882946014}",1 
+https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,689443142454195,"[{'type': 'text', 'value': 'Orsta 🔥 vision language models trained with V-Triune, a unified reinforcement learning system by MiniMax AI', 'raw': 'Orsta 🔥 vision language models trained with V-Triune, a unified reinforcement learning system by MiniMax AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'One-RL-to-See-Them-All/one-rl-to-see-them-all-6833d27abce23898b2f9815a'}, 'url': 'https://huggingface.co/collections/One-RL-to-See-Them-All/one-rl-to-see-them-all-6833d27abce23898b2f9815a', 'raw': 'https://huggingface.co/collections/One-RL-to-See-Them-All/one-rl-to-see-them-all-6833d27abce23898b2f9815a'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 7B & 32B with MIT license', 'raw': '✨ 7B & 32B with MIT license'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Masters 8 visual tasks: math, science QA, charts, puzzles, object detection, grounding, OCR, and counting ', 'raw': '✨ Masters 8 visual tasks: math, science QA, charts, puzzles, object detection, grounding, OCR, and counting '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Uses Dynamic IoU rewards for better visual understanding ', 'raw': '✨ Uses Dynamic IoU rewards for better visual understanding '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Strong performance in visual reasoning and perception', 'raw': '✨Strong performance in visual reasoning and perception'}, {'type': 'new_line', 'raw': '\n'}]","Orsta 🔥 vision language models trained with V-Triune, a unified reinforcement learning system by MiniMax AI + +https://huggingface.co/collections/One-RL-to-See-Them-All/one-rl-to-see-them-all-6833d27abce23898b2f9815a + +✨ 7B & 32B with MIT license +✨ Masters 8 visual tasks: math, science QA, charts, puzzles, object detection, grounding, OCR, and counting +✨ Uses Dynamic IoU rewards for better visual understanding +✨Strong performance in visual reasoning and perception +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/iDTBwKUX3TugPc0Ur5TV7.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['prithivMLmods', 'John6666', 'AtAndDev', 'VicVic0524', 'hilda1234'], 'count': 5}, {'reaction': '👍', 'users': ['sikang99', 'AtAndDev', 'alibave491'], 'count': 3}, {'reaction': '❤️', 'users': ['yo', 'AtAndDev'], 'count': 2}]",2025-05-26 14:40:44,2025-05-26 14:40:44.037,[],/posts/AdinaY/689443142454195,2846,"{'language': 'en', 'probability': 0.7862288355827332}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64257c616d0f0f5f1dc6aa2a/WNXC2PcyDn-jt9ZY5Rbka.jpeg,3289.0,Joffrey THOMAS,Jofthomas,656021086131496,"[{'type': 'text', 'value': 'Meet our new agentic model : 𝗗𝗲𝘃𝘀𝘁𝗿𝗮𝗹', 'raw': 'Meet our new agentic model : 𝗗𝗲𝘃𝘀𝘁𝗿𝗮𝗹'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Devstral is an open-source LLM built software engineering tasks built under a collaboration between Mistral AI and All Hands AI 🙌.', 'raw': 'Devstral is an open-source LLM built software engineering tasks built under a collaboration between Mistral AI and All Hands AI 🙌.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, 
{'type': 'text', 'value': '𝗞𝗲𝘆 𝗳𝗲𝗮𝘁𝘂𝗿𝗲𝘀 :', 'raw': '𝗞𝗲𝘆 𝗳𝗲𝗮𝘁𝘂𝗿𝗲𝘀 :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🤖 𝗔𝗴𝗲𝗻𝘁𝘀 : perfect for Agentic coding', 'raw': '• 🤖 𝗔𝗴𝗲𝗻𝘁𝘀 : perfect for Agentic coding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🍃 𝗹𝗶𝗴𝗵𝘁𝘄𝗲𝗶𝗴𝗵𝘁: Devstral is a 𝟮𝟰𝗕 parameter based on Mistral small. ', 'raw': '• 🍃 𝗹𝗶𝗴𝗵𝘁𝘄𝗲𝗶𝗴𝗵𝘁: Devstral is a 𝟮𝟰𝗕 parameter based on Mistral small. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• ©️ 𝗔𝗽𝗮𝗰𝗵𝗲 𝟮.𝟬, meaning fully open-source !', 'raw': '• ©️ 𝗔𝗽𝗮𝗰𝗵𝗲 𝟮.𝟬, meaning fully open-source !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 📄 A 𝟭𝟮𝟴𝗸 context window.', 'raw': '• 📄 A 𝟭𝟮𝟴𝗸 context window.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚Blog : ', 'raw': '📚Blog : '}, {'type': 'link', 'href': 'https://mistral.ai/news/devstral', 'raw': 'https://mistral.ai/news/devstral'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡API : The model is also available on our API under the name 𝗱𝗲𝘃𝘀𝘁𝗿𝗮𝗹-𝘀𝗺𝗮𝗹𝗹-𝟮𝟱𝟬𝟱', 'raw': '⚡API : The model is also available on our API under the name 𝗱𝗲𝘃𝘀𝘁𝗿𝗮𝗹-𝘀𝗺𝗮𝗹𝗹-𝟮𝟱𝟬𝟱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 repo : ', 'raw': '🤗 repo : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'mistralai/Devstral-Small-2505'}, 'url': 'https://huggingface.co/mistralai/Devstral-Small-2505', 'raw': 'https://huggingface.co/mistralai/Devstral-Small-2505'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Can't wait to see what you will build with it !"", 'raw': ""Can't wait to see what you will build with it !""}]","Meet our new agentic model : 𝗗𝗲𝘃𝘀𝘁𝗿𝗮𝗹 + +Devstral is an open-source LLM built software engineering tasks built under a collaboration between Mistral AI and All Hands AI 🙌. + +𝗞𝗲𝘆 𝗳𝗲𝗮𝘁𝘂𝗿𝗲𝘀 : +• 🤖 𝗔𝗴𝗲𝗻𝘁𝘀 : perfect for Agentic coding +• 🍃 𝗹𝗶𝗴𝗵𝘁𝘄𝗲𝗶𝗴𝗵𝘁: Devstral is a 𝟮𝟰𝗕 parameter based on Mistral small. +• ©️ 𝗔𝗽𝗮𝗰𝗵𝗲 𝟮.𝟬, meaning fully open-source ! +• 📄 A 𝟭𝟮𝟴𝗸 context window. + +📚Blog : https://mistral.ai/news/devstral +⚡API : The model is also available on our API under the name 𝗱𝗲𝘃𝘀𝘁𝗿𝗮𝗹-𝘀𝗺𝗮𝗹𝗹-𝟮𝟱𝟬𝟱 +🤗 repo : https://huggingface.co/mistralai/Devstral-Small-2505 + +Can't wait to see what you will build with it !",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'jqop', 'Pentium95', 'victor', 'AtAndDev'], 'count': 5}, {'reaction': '👍', 'users': ['kiuckhuang', 'cob05', 'AtAndDev', 'QuasiSpecies'], 'count': 4}]",2025-05-21 14:22:57,2025-05-23 14:27:41.667,"[{'_id': '67f26a4155fe17a33d1d3ab4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/dZBRp3QVJfzLxDFvMFwgf.png', 'fullname': 'Vijay Manickam', 'name': 'Vijay-Manickam', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/Jofthomas/656021086131496,3281,"{'language': 'en', 'probability': 0.7410784959793091}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1641203017724-noauth.png,138.0,Joao Gante,joaogante,683077145456278,"[{'type': 'text', 'value': ""Let's go! Custom generation code has landed in "", 'raw': ""Let's go! 
Custom generation code has landed in ""}, {'type': 'inline_code', 'code': 'transformers', 'raw': '`transformers`'}, {'type': 'text', 'value': ' 🚀', 'raw': ' 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Have you designed a new cool KV cache? Maybe you're comparing new test-time compute ideas you've been researching? Have you found a way to do diffusion with existing models? You can now easily share your findings with the community with custom generation code, sharing the well-known "", 'raw': ""Have you designed a new cool KV cache? Maybe you're comparing new test-time compute ideas you've been researching? Have you found a way to do diffusion with existing models? You can now easily share your findings with the community with custom generation code, sharing the well-known ""}, {'type': 'inline_code', 'code': 'generate', 'raw': '`generate`'}, {'type': 'text', 'value': ' interface 🤓', 'raw': ' interface 🤓'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In a nutshell, we have expanded the support of custom modeling code on the Hub with *model-agnostic* custom generation code. Write for one model, reuse with any model -- hopefully, this will democratize access to new generation ideas \U0001fae1', 'raw': 'In a nutshell, we have expanded the support of custom modeling code on the Hub with *model-agnostic* custom generation code. Write for one model, reuse with any model -- hopefully, this will democratize access to new generation ideas \U0001fae1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As a creator, you gain the ability to get your ideas in ', 'raw': 'As a creator, you gain the ability to get your ideas in '}, {'type': 'inline_code', 'code': 'transformers', 'raw': '`transformers`'}, {'type': 'text', 'value': "" with minimal effort. You'll also have access to all Hub features: a landing page for your creation, discussions, usage metrics, ... 🤓"", 'raw': "" with minimal effort. You'll also have access to all Hub features: a landing page for your creation, discussions, usage metrics, ... 🤓""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💎 Resources 💎', 'raw': '💎 Resources 💎'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- docs: ', 'raw': '- docs: '}, {'type': 'link', 'href': 'https://huggingface.co/docs/transformers/generation_strategies#custom-decoding-methods', 'raw': 'https://huggingface.co/docs/transformers/generation_strategies#custom-decoding-methods'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- minimal example: ', 'raw': '- minimal example: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'transformers-community/custom_generate_example'}, 'url': 'https://huggingface.co/transformers-community/custom_generate_example', 'raw': 'https://huggingface.co/transformers-community/custom_generate_example'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- discussion: ', 'raw': '- discussion: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'transformers-community/support', 'discussionNum': 10}, 'url': 'https://huggingface.co/spaces/transformers-community/support/discussions/10', 'raw': 'https://huggingface.co/spaces/transformers-community/support/discussions/10'}]","Let's go! 
Custom generation code has landed in `transformers` 🚀 + +Have you designed a new cool KV cache? Maybe you're comparing new test-time compute ideas you've been researching? Have you found a way to do diffusion with existing models? You can now easily share your findings with the community with custom generation code, sharing the well-known `generate` interface 🤓 + +In a nutshell, we have expanded the support of custom modeling code on the Hub with *model-agnostic* custom generation code. Write for one model, reuse with any model -- hopefully, this will democratize access to new generation ideas 🫡 + +As a creator, you gain the ability to get your ideas in `transformers` with minimal effort. You'll also have access to all Hub features: a landing page for your creation, discussions, usage metrics, ... 🤓 + +💎 Resources 💎 +- docs: https://huggingface.co/docs/transformers/generation_strategies#custom-decoding-methods +- minimal example: https://huggingface.co/transformers-community/custom_generate_example +- discussion: https://huggingface.co/spaces/transformers-community/support/discussions/10",[],[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}, {'reaction': '🤗', 'users': ['John6666'], 'count': 1}]",2025-05-21 10:13:12,2025-05-21 10:13:12.925,[],/posts/joaogante/683077145456278,486,"{'language': 'en', 'probability': 0.8424704670906067}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/z82tUCF_X18mMaP7m0NCQ.png,924.0,openfree,openfree,102455854917725,"[{'type': 'text', 'value': '🌾 NH Prediction: AI System for Korean Agricultural Price Forecasting 🌾', 'raw': '🌾 NH Prediction: AI System for Korean Agricultural Price Forecasting 🌾'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Project Introduction', 'raw': '📊 Project Introduction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Price volatility in agricultural markets has significant impacts from producers to consumers! NH Prediction is an innovative system that utilizes cutting-edge AI technology to predict Korean agricultural wholesale prices based on extensive data spanning 40 years. 🚀', 'raw': 'Price volatility in agricultural markets has significant impacts from producers to consumers! NH Prediction is an innovative system that utilizes cutting-edge AI technology to predict Korean agricultural wholesale prices based on extensive data spanning 40 years. 
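For the custom generation announcement above, a sketch of the usage pattern from the linked docs and minimal example; the base model chosen here is arbitrary:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Qwen/Qwen2.5-0.5B-Instruct"  # any causal LM works; this pick is arbitrary
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer("The quick brown fox", return_tensors="pt")
# Pull the decoding loop from the minimal-example Hub repo linked in the post.
out = model.generate(
    **inputs,
    custom_generate="transformers-community/custom_generate_example",
    trust_remote_code=True,  # the custom loop is code fetched from the Hub
)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```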
🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'VIDraft/NH-Prediction'}, 'url': 'https://huggingface.co/spaces/VIDraft/NH-Prediction', 'raw': 'https://huggingface.co/spaces/VIDraft/NH-Prediction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginipick/NH-Korea'}, 'url': 'https://huggingface.co/spaces/ginipick/NH-Korea', 'raw': 'https://huggingface.co/spaces/ginipick/NH-Korea'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🧠 VIDraft's 14 Enhanced Prediction Models"", 'raw': ""🧠 VIDraft's 14 Enhanced Prediction Models""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The VIDraft research team has developed 14 advanced prediction models by reinforcing existing forecasting approaches:', 'raw': 'The VIDraft research team has developed 14 advanced prediction models by reinforcing existing forecasting approaches:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔮 VID-SARIMA Series: Precisely models seasonality and trends (up to 99.99% accuracy)', 'raw': '🔮 VID-SARIMA Series: Precisely models seasonality and trends (up to 99.99% accuracy)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚖️ VID-ETS Series: Captures multiplicative/additive variation patterns', 'raw': '⚖️ VID-ETS Series: Captures multiplicative/additive variation patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📈 VID-Holt/Holt-Winters: Simultaneous analysis of linear trends and seasonality', 'raw': '📈 VID-Holt/Holt-Winters: Simultaneous analysis of linear trends and seasonality'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📉 VID-MovingAverage/WeightedMA: Noise removal and medium-term trend identification', 'raw': '📉 VID-MovingAverage/WeightedMA: Noise removal and medium-term trend identification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 VID-Fourier+LR: Hybrid approach capturing complex periodicity', 'raw': '🔍 VID-Fourier+LR: Hybrid approach capturing complex periodicity'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key Features', 'raw': '✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟 Item-Specific Optimization: Customized predictions for each agricultural product (rice, cabbage, apples, and 50+ more)', 'raw': '🌟 Item-Specific Optimization: Customized predictions for each agricultural product (rice, cabbage, apples, and 50+ more)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔄 Ensemble Approach: Enhanced prediction robustness by combining top models', 'raw': '🔄 Ensemble Approach: Enhanced prediction robustness by combining top models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📱 Bilingual Support: Korean/English interfaces', 'raw': '📱 Bilingual Support: Korean/English interfaces'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗓️ Integrated Forecast Periods: Simultaneous long-term and short-term predictions', 'raw': '🗓️ Integrated Forecast Periods: Simultaneous long-term and short-term predictions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Advanced Visualization', 'raw': '📊 Advanced Visualization'}, {'type': 'new_line', 'raw': '\n'}, 
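The VID-* models above are the Space's own enhanced variants; purely as an illustration, here is how the classical building blocks they are described as reinforcing (SARIMA and Holt-Winters) look in `statsmodels` on a toy monthly price series:

```python
import numpy as np
import pandas as pd
from statsmodels.tsa.statespace.sarimax import SARIMAX
from statsmodels.tsa.holtwinters import ExponentialSmoothing

# Toy data: 10 years of monthly wholesale prices with a trend and yearly seasonality.
idx = pd.date_range("2015-01-01", periods=120, freq="MS")
prices = pd.Series(
    1000 + 2 * np.arange(120) + 150 * np.sin(2 * np.pi * np.arange(120) / 12),
    index=idx,
)

# Seasonal ARIMA with a 12-month cycle, and additive Holt-Winters smoothing.
sarima = SARIMAX(prices, order=(1, 1, 1), seasonal_order=(1, 1, 1, 12)).fit(disp=False)
hw = ExponentialSmoothing(prices, trend="add", seasonal="add", seasonal_periods=12).fit()

print(sarima.forecast(steps=12))  # 12-month-ahead SARIMA forecast
print(hw.forecast(12))            # 12-month-ahead Holt-Winters forecast
```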
{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 Use Cases', 'raw': '💡 Use Cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👨\u200d🌾 Producers: Crop planning optimization, shipping timing adjustment ', 'raw': '👨\u200d🌾 Producers: Crop planning optimization, shipping timing adjustment '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏬 Distributors: Inventory management, purchase planning optimization', 'raw': '🏬 Distributors: Inventory management, purchase planning optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏛️ Policymakers: Market stabilization policy development, early warning for supply-demand imbalances', 'raw': '🏛️ Policymakers: Market stabilization policy development, early warning for supply-demand imbalances'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💰 Financial Institutions: Agricultural investment decisions, derivative product pricing', 'raw': '💰 Financial Institutions: Agricultural investment decisions, derivative product pricing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Prediction Accuracy', 'raw': '🔍 Prediction Accuracy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💯 High Accuracy: Achieving 99%+ accuracy for major items', 'raw': '💯 High Accuracy: Achieving 99%+ accuracy for major items'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔄 Seasonality Recognition: Precisely captures monthly price variation patterns', 'raw': '🔄 Seasonality Recognition: Precisely captures monthly price variation patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📌 Confidence Intervals: Decision support through prediction uncertainty visualization', 'raw': '📌 Confidence Intervals: Decision support through prediction uncertainty visualization'}]","🌾 NH Prediction: AI System for Korean Agricultural Price Forecasting 🌾 + +📊 Project Introduction +Price volatility in agricultural markets has significant impacts from producers to consumers! NH Prediction is an innovative system that utilizes cutting-edge AI technology to predict Korean agricultural wholesale prices based on extensive data spanning 40 years. 
🚀 + +https://huggingface.co/spaces/VIDraft/NH-Prediction +https://huggingface.co/spaces/ginipick/NH-Korea + +🧠 VIDraft's 14 Enhanced Prediction Models +The VIDraft research team has developed 14 advanced prediction models by reinforcing existing forecasting approaches: + +🔮 VID-SARIMA Series: Precisely models seasonality and trends (up to 99.99% accuracy) +⚖️ VID-ETS Series: Captures multiplicative/additive variation patterns +📈 VID-Holt/Holt-Winters: Simultaneous analysis of linear trends and seasonality +📉 VID-MovingAverage/WeightedMA: Noise removal and medium-term trend identification +🔍 VID-Fourier+LR: Hybrid approach capturing complex periodicity + +✨ Key Features + +🌟 Item-Specific Optimization: Customized predictions for each agricultural product (rice, cabbage, apples, and 50+ more) +🔄 Ensemble Approach: Enhanced prediction robustness by combining top models +📱 Bilingual Support: Korean/English interfaces +🗓️ Integrated Forecast Periods: Simultaneous long-term and short-term predictions +📊 Advanced Visualization + +💡 Use Cases + +👨‍🌾 Producers: Crop planning optimization, shipping timing adjustment +🏬 Distributors: Inventory management, purchase planning optimization +🏛️ Policymakers: Market stabilization policy development, early warning for supply-demand imbalances +💰 Financial Institutions: Agricultural investment decisions, derivative product pricing + +🔍 Prediction Accuracy + +💯 High Accuracy: Achieving 99%+ accuracy for major items +🔄 Seasonality Recognition: Precisely captures monthly price variation patterns +📌 Confidence Intervals: Decision support through prediction uncertainty visualization","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66e54edddba1e4fee4500a5a/ctN3Q_yv3WpfgleFVqnl-.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66e54edddba1e4fee4500a5a/LrRzd5NimMoWnaDIiRSnK.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66e54edddba1e4fee4500a5a/qpY3sXIgWKBITA__NDsqU.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['openfree', 'sanzwarawesme', 'hackstone', 'chrissamt', 'tegaclara', 'mikedekatiny', 'truelooper', '23villanw', 'ginipick', 'nvmhtfile', 'lmdfilesx', 'viidfedenwad', 'samturrets', 'japojapa', 'trinitycap', 'soulbariansz', 'seawolf2357', 'espelesito', 'dekatoridev', 'ausntmarzi', 'solarbeams', 'John6666', 'esgransp', 'darkzbaron', 'nikecomboset', 'mahat07', 'x2501', 'imposterRED', 'aiqcamp', 'aiqtech'], 'count': 30}, {'reaction': '🚀', 'users': ['openfree', 'sanzwarawesme', 'hackstone', 'mikedekatiny', 'truelooper', '23villanw', 'ginipick', 'samturrets', 'solarbeams', 'nikecomboset'], 'count': 10}, {'reaction': '👀', 'users': ['openfree', 'hackstone', 'ginipick', 'solarbeams', 'Cluelesstalk', 'SosaJhons'], 'count': 6}, {'reaction': '👍', 'users': ['openfree', 'lmdfilesx', 'alexdjisseglo71', 'danhtran2mind'], 'count': 4}, {'reaction': '🤗', 'users': ['openfree'], 'count': 1}, {'reaction': '❤️', 'users': ['lmdfilesx'], 'count': 1}]",2025-05-21 09:10:39,2025-05-21 09:10:39.079,[],/posts/openfree/102455854917725,3509,"{'language': 'en', 'probability': 0.8288448452949524}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,709690582361356,"[{'type': 'text', 'value': 'ByteDance is absolutely cooking lately🔥', 'raw': 'ByteDance is absolutely cooking lately🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 
'value': 'BAGEL 🥯 7B active parameter open multimodal foundation model by Bytedance Seed team.', 'raw': 'BAGEL 🥯 7B active parameter open multimodal foundation model by Bytedance Seed team.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ByteDance-Seed/BAGEL-7B-MoT'}, 'url': 'https://huggingface.co/ByteDance-Seed/BAGEL-7B-MoT', 'raw': 'https://huggingface.co/ByteDance-Seed/BAGEL-7B-MoT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Apache 2.0', 'raw': '✨ Apache 2.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Outperforms top VLMs (Qwen2.5-VL & InternVL-2.5)', 'raw': '✨ Outperforms top VLMs (Qwen2.5-VL & InternVL-2.5)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Mixture-of-Transformer-Experts + dual encoders', 'raw': '✨ Mixture-of-Transformer-Experts + dual encoders'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Trained on trillions of interleaved tokens', 'raw': '✨ Trained on trillions of interleaved tokens'}, {'type': 'new_line', 'raw': '\n'}]","ByteDance is absolutely cooking lately🔥 + +BAGEL 🥯 7B active parameter open multimodal foundation model by Bytedance Seed team. + +https://huggingface.co/ByteDance-Seed/BAGEL-7B-MoT + +✨ Apache 2.0 +✨ Outperforms top VLMs (Qwen2.5-VL & InternVL-2.5) +✨ Mixture-of-Transformer-Experts + dual encoders +✨ Trained on trillions of interleaved tokens +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/H7NGn4Yet86ZorNy-acB0.webp'}]",[],"[{'reaction': '🚀', 'users': ['merve', 'John6666', 'linoyts', 'loubnabnl', 'DmitryRyumin', 'davidrd123'], 'count': 6}, {'reaction': '🔥', 'users': ['merve', 'iky1e', 'loubnabnl', 'davidrd123', 'alibave491'], 'count': 5}]",2025-05-21 09:01:30,2025-05-21 09:01:30.519,[],/posts/AdinaY/709690582361356,2807,"{'language': 'en', 'probability': 0.5879154801368713}",0 +/avatars/3c2e5c8ddb24959207527de885761da5.svg,6.0,Pranav Upadhyaya,pranavupadhyaya52,196223267192478,"[{'type': 'text', 'value': ""Hello everyone. I've built a medical AI assistant application."", 'raw': ""Hello everyone. I've built a medical AI assistant application.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'pranavupadhyaya52/MediWiki_Medical_Assistant'}, 'url': 'https://huggingface.co/spaces/pranavupadhyaya52/MediWiki_Medical_Assistant', 'raw': 'https://huggingface.co/spaces/pranavupadhyaya52/MediWiki_Medical_Assistant'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' It is a multimodal chatbot and can accept text, radiology images, prescription and lab reports (currently it only accepts one image per chat.) and audio files (wav and MP3 extension files). ', 'raw': ' It is a multimodal chatbot and can accept text, radiology images, prescription and lab reports (currently it only accepts one image per chat.) and audio files (wav and MP3 extension files). '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It is built on top of a finetuned Llama 3.2 11B vision instruct. It also uses a 41000 medically related question answer pair stored in the form of chromadb embedding for Retrieval Augmented Generation (RAG). 
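For the medical assistant above, a sketch of the ChromaDB retrieval step it describes: Q&A pairs are embedded into a collection, and the closest ones are retrieved at chat time to ground the model's answer. The collection name and documents here are invented examples, not the app's actual data:

```python
import chromadb

client = chromadb.PersistentClient(path="medical_qa_db")
collection = client.get_or_create_collection("medical_qa")

# Index Q&A pairs; ChromaDB embeds documents with its default embedding function.
collection.add(
    ids=["qa_001", "qa_002"],
    documents=[
        "Q: What are common symptoms of anemia? A: Fatigue, pallor, shortness of breath.",
        "Q: What does an elevated ALT level indicate? A: Possible liver inflammation or damage.",
    ],
)

# Retrieve the most relevant pairs for a user question before prompting the LLM.
hits = collection.query(query_texts=["Why am I always tired?"], n_results=2)
print(hits["documents"][0])
```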
', 'raw': 'It is built on top of a finetuned Llama 3.2 11B vision instruct. It also uses a 41000 medically related question answer pair stored in the form of chromadb embedding for Retrieval Augmented Generation (RAG). '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Please let me know your thoughts on my project and how I can improve it further. Thank you.', 'raw': 'Please let me know your thoughts on my project and how I can improve it further. Thank you.'}]","Hello everyone. I've built a medical AI assistant application. + +https://huggingface.co/spaces/pranavupadhyaya52/MediWiki_Medical_Assistant + + It is a multimodal chatbot and can accept text, radiology images, prescription and lab reports (currently it only accepts one image per chat.) and audio files (wav and MP3 extension files). + +It is built on top of a finetuned Llama 3.2 11B vision instruct. It also uses a 41000 medically related question answer pair stored in the form of chromadb embedding for Retrieval Augmented Generation (RAG). + +Please let me know your thoughts on my project and how I can improve it further. Thank you.",[],[],"[{'reaction': '🔥', 'users': ['Seratonia', 'nqzfaizal77ai', 'Pushpendrasinghparmar', 'John6666', 'Dcas89', 'ronedgecomb'], 'count': 6}]",2025-05-21 01:54:03,2025-06-11 16:38:48.313,"[{'_id': '682c9f0bae3d8954ac57bc9d', 'avatarUrl': '/avatars/c489870034e08dc5ace3eed970b234d7.svg', 'fullname': 'Parmar', 'name': 'Pushpendrasinghparmar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 0, 'isFollowing': False}, {'_id': '6806616ff8cc817f1413b970', 'avatarUrl': '/avatars/3c2e5c8ddb24959207527de885761da5.svg', 'fullname': 'Pranav Upadhyaya', 'name': 'pranavupadhyaya52', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6, 'isFollowing': False}, {'_id': '67f4d1ff4503b12888d321f4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/KV9guSL1ST81gjbem6oM3.png', 'fullname': 'Yasein Shikhada', 'name': 'Ironman1123', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/pranavupadhyaya52/196223267192478,2187,"{'language': 'en', 'probability': 0.8841228485107422}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,454841394174188,"[{'type': 'text', 'value': 'Video link : ', 'raw': 'Video link : '}, {'type': 'link', 'href': 'https://youtu.be/gFMUChHgXYk', 'raw': 'https://youtu.be/gFMUChHgXYk'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'VEO 3 AI Video Generation is Literally Insane with Perfect Audio! - 60 User Generated Wild Examples - Finally We can Expect Native Audio Supported Open Source Video Gen Models', 'raw': 'VEO 3 AI Video Generation is Literally Insane with Perfect Audio! 
- 60 User Generated Wild Examples - Finally We can Expect Native Audio Supported Open Source Video Gen Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Video link : ', 'raw': 'Video link : '}, {'type': 'link', 'href': 'https://youtu.be/gFMUChHgXYk', 'raw': 'https://youtu.be/gFMUChHgXYk'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Google Unveils Veo 3: A Game-Changing AI Video Generator with Native Audio', 'raw': 'Google Unveils Veo 3: A Game-Changing AI Video Generator with Native Audio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Published May 21, 2025', 'raw': 'Published May 21, 2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'At Google I/O 2025, held on May 20–21 in Mountain View, California, Google announced the launch of Veo 3, its latest AI-powered video generation model, marking a significant leap in creative technology. This third-generation model, succeeding Veo 2, introduces groundbreaking features, most notably its ability to generate high-quality videos with integrated audio, including dialogue, sound effects, and ambient noise. Alongside Veo 3, Google introduced Flow, a new AI-driven video editing suite designed to empower filmmakers and content creators. Here’s an in-depth look at Veo 3, its capabilities, and its potential to reshape AI filmmaking.', 'raw': 'At Google I/O 2025, held on May 20–21 in Mountain View, California, Google announced the launch of Veo 3, its latest AI-powered video generation model, marking a significant leap in creative technology. This third-generation model, succeeding Veo 2, introduces groundbreaking features, most notably its ability to generate high-quality videos with integrated audio, including dialogue, sound effects, and ambient noise. Alongside Veo 3, Google introduced Flow, a new AI-driven video editing suite designed to empower filmmakers and content creators. Here’s an in-depth look at Veo 3, its capabilities, and its potential to reshape AI filmmaking.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What is Veo 3?', 'raw': 'What is Veo 3?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Veo 3 is Google’s state-of-the-art video generation model, built to create short video clips from text or image prompts. Unlike its predecessors and competitors like OpenAI’s Sora, Veo 3 stands out for its ability to produce videos with synchronized audio, such as character dialogue, background traffic noise, or birds chirping in a park scene. This advancement addresses a long-standing limitation in AI video generation, where tools could create realistic visuals but lacked integrated sound, often requiring separate audio editing.', 'raw': 'Veo 3 is Google’s state-of-the-art video generation model, built to create short video clips from text or image prompts. Unlike its predecessors and competitors like OpenAI’s Sora, Veo 3 stands out for its ability to produce videos with synchronized audio, such as character dialogue, background traffic noise, or birds chirping in a park scene. 
This advancement addresses a long-standing limitation in AI video generation, where tools could create realistic visuals but lacked integrated sound, often requiring separate audio editing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The model excels in prompt adherence, accurately interpreting complex instructions to generate scenes with realistic physics, consistent movement, and precise lip-syncing for dialogue. Google claims Veo 3 delivers higher-quality video content than Veo 2, with improved rendering of intricate details like fabrics, water, and animal fur when paired with Google’s new image generation model, Imagen 4.', 'raw': 'The model excels in prompt adherence, accurately interpreting complex instructions to generate scenes with realistic physics, consistent movement, and precise lip-syncing for dialogue. Google claims Veo 3 delivers higher-quality video content than Veo 2, with improved rendering of intricate details like fabrics, water, and animal fur when paired with Google’s new image generation model, Imagen 4.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Video link : https://youtu.be/gFMUChHgXYk + +VEO 3 AI Video Generation is Literally Insane with Perfect Audio! - 60 User Generated Wild Examples - Finally We can Expect Native Audio Supported Open Source Video Gen Models + +Video link : https://youtu.be/gFMUChHgXYk + +Google Unveils Veo 3: A Game-Changing AI Video Generator with Native Audio +Published May 21, 2025 + +At Google I/O 2025, held on May 20–21 in Mountain View, California, Google announced the launch of Veo 3, its latest AI-powered video generation model, marking a significant leap in creative technology. This third-generation model, succeeding Veo 2, introduces groundbreaking features, most notably its ability to generate high-quality videos with integrated audio, including dialogue, sound effects, and ambient noise. Alongside Veo 3, Google introduced Flow, a new AI-driven video editing suite designed to empower filmmakers and content creators. Here’s an in-depth look at Veo 3, its capabilities, and its potential to reshape AI filmmaking. + +What is Veo 3? +Veo 3 is Google’s state-of-the-art video generation model, built to create short video clips from text or image prompts. Unlike its predecessors and competitors like OpenAI’s Sora, Veo 3 stands out for its ability to produce videos with synchronized audio, such as character dialogue, background traffic noise, or birds chirping in a park scene. This advancement addresses a long-standing limitation in AI video generation, where tools could create realistic visuals but lacked integrated sound, often requiring separate audio editing. + +The model excels in prompt adherence, accurately interpreting complex instructions to generate scenes with realistic physics, consistent movement, and precise lip-syncing for dialogue. Google claims Veo 3 delivers higher-quality video content than Veo 2, with improved rendering of intricate details like fabrics, water, and animal fur when paired with Google’s new image generation model, Imagen 4. 
+ +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/D-2dT0AJtjvZJ_Z7IDckf.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/yeF_Q2mvmmLmsKato6G6f.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/KaQwiFUg9VEBFdcA1pji3.png'}]",[],"[{'reaction': '👀', 'users': ['MonsterMMORPG', 'ermac1987', 'John6666', 'jealejandro', 'ronedgecomb'], 'count': 5}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'ermac1987', 'gyOliver'], 'count': 3}, {'reaction': '🔥', 'users': ['MonsterMMORPG', 'ermac1987'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'ermac1987'], 'count': 2}, {'reaction': '🤗', 'users': ['MonsterMMORPG', 'ermac1987'], 'count': 2}, {'reaction': '😎', 'users': ['MonsterMMORPG', 'ermac1987'], 'count': 2}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'Madadan'], 'count': 2}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-05-20 23:54:38,2025-05-21 15:50:36.539,"[{'_id': '676629e3bc1e4358c018373a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4Dh7x5WcAr1-ebLrrzJFq.jpeg', 'fullname': 'Esinetverse', 'name': 'Esicoin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/MonsterMMORPG/454841394174188,2374,"{'language': 'en', 'probability': 0.8523218631744385}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png,122.0,Asankhaya Sharma,codelion,735622263233891,"[{'type': 'text', 'value': ""🧬 Hey everyone! Just released **OpenEvolve** - an open-source implementation of Google DeepMind's AlphaEvolve system."", 'raw': ""🧬 Hey everyone! Just released **OpenEvolve** - an open-source implementation of Google DeepMind's AlphaEvolve system.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's an evolutionary coding agent that uses LLMs to discover and optimize algorithms. I successfully replicated DeepMind's results on circle packing (99.97% match!) and evolved a random search into a simulated annealing algorithm."", 'raw': ""It's an evolutionary coding agent that uses LLMs to discover and optimize algorithms. I successfully replicated DeepMind's results on circle packing (99.97% match!) 
and evolved a random search into a simulated annealing algorithm.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key features:', 'raw': '✨ Key features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Evolves entire codebases (not just single functions)', 'raw': '- Evolves entire codebases (not just single functions)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Works with any OpenAI-compatible API', 'raw': '- Works with any OpenAI-compatible API'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- LLM ensemble approach for better results', 'raw': '- LLM ensemble approach for better results'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multi-objective optimization', 'raw': '- Multi-objective optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Check it out:', 'raw': '👉 Check it out:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub: ', 'raw': 'GitHub: '}, {'type': 'link', 'href': 'https://github.com/codelion/openevolve', 'raw': 'https://github.com/codelion/openevolve'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog post: ', 'raw': 'Blog post: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/codelion/openevolve', 'raw': 'https://huggingface.co/blog/codelion/openevolve'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Would love to hear your thoughts or answer any questions about it!', 'raw': 'Would love to hear your thoughts or answer any questions about it!'}]","🧬 Hey everyone! Just released **OpenEvolve** - an open-source implementation of Google DeepMind's AlphaEvolve system. + +It's an evolutionary coding agent that uses LLMs to discover and optimize algorithms. I successfully replicated DeepMind's results on circle packing (99.97% match!) and evolved a random search into a simulated annealing algorithm. 
+ +✨ Key features: +- Evolves entire codebases (not just single functions) +- Works with any OpenAI-compatible API +- LLM ensemble approach for better results +- Multi-objective optimization + +👉 Check it out: +GitHub: https://github.com/codelion/openevolve +Blog post: https://huggingface.co/blog/codelion/openevolve + +Would love to hear your thoughts or answer any questions about it!",[],[],"[{'reaction': '🔥', 'users': ['codelion', 'yalsaffar', 'dark-pen', 'knutole', 'KingNish', 'asdasd255433', 'Wesxdz'], 'count': 7}, {'reaction': '🚀', 'users': ['codelion', 'John6666', 'Araeynn', 'dar-tau', 'nomadicsynth'], 'count': 5}, {'reaction': '❤️', 'users': ['codelion', 'Willie999', 'Liamlllm'], 'count': 3}, {'reaction': '👍', 'users': ['JLouisBiz', 'codelion'], 'count': 2}, {'reaction': '👀', 'users': ['codelion'], 'count': 1}]",2025-05-20 23:04:59,2025-05-20 23:04:59.871,[],/posts/codelion/735622263233891,2847,"{'language': 'en', 'probability': 0.8138349056243896}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d50e9ef9cbfa798c590004/FlVe8chafigMfrPpMeJRL.jpeg,133.0,Jared Sulzdorf,jsulz,285043040409517,"[{'type': 'text', 'value': 'Heyo ', 'raw': 'Heyo '}, {'type': 'mention', 'user': 'RichardErkhov', 'raw': '@RichardErkhov'}, {'type': 'text', 'value': ' the ', 'raw': ' the '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'xet-team'}, 'url': 'https://huggingface.co/xet-team', 'raw': 'https://huggingface.co/xet-team', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66b05ca6e7c57eac7cafbbc4/f-BRRaSr0QLq3nHlLqD3o.png'}, {'type': 'text', 'value': ' at Hugging face was wondering if you wanted to join the fun and jump over to Xet storage. 🤗', 'raw': ' at Hugging face was wondering if you wanted to join the fun and jump over to Xet storage. 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We've been onboarding folks "", 'raw': ""We've been onboarding folks ""}, {'type': 'link', 'href': 'https://huggingface.co/blog/xet-on-the-hub', 'raw': 'https://huggingface.co/blog/xet-on-the-hub'}, {'type': 'text', 'value': ' know the backend can scale (Llama 4 and Qwen 3 are on Xet), is great for working with quants (see ', 'raw': ' know the backend can scale (Llama 4 and Qwen 3 are on Xet), is great for working with quants (see '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'xet-team/quantization-dedup'}, 'url': 'https://huggingface.co/spaces/xet-team/quantization-dedup', 'raw': 'https://huggingface.co/spaces/xet-team/quantization-dedup'}, {'type': 'text', 'value': "" ), and we're pushing on inviting impactful orgs and users on the Hub. You fit the bill. "", 'raw': "" ), and we're pushing on inviting impactful orgs and users on the Hub. You fit the bill. 
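The codelion post above says OpenEvolve evolved a plain random search into simulated annealing. For readers unfamiliar with the target algorithm, here is a minimal, self-contained sketch of simulated annealing on a generic 1-D minimization problem. The objective, temperature schedule, and step size are illustrative assumptions, not code from the OpenEvolve repository.

```python
import math
import random

def simulated_annealing(f, x0, steps=10_000, t0=1.0, cooling=0.999, step=0.1):
    """Minimize f starting from x0.

    An improving candidate is always accepted; a worse one is accepted with
    probability exp(-delta / T), and the temperature T decays geometrically.
    """
    x, fx, t = x0, f(x0), t0
    best_x, best_fx = x, fx
    for _ in range(steps):
        cand = x + random.uniform(-step, step)
        fc = f(cand)
        delta = fc - fx
        if delta < 0 or random.random() < math.exp(-delta / t):
            x, fx = cand, fc
            if fx < best_fx:
                best_x, best_fx = x, fx
        t *= cooling
    return best_x, best_fx

# Toy objective with many local minima; annealing can escape them where a
# purely greedy or random search tends to stall.
f = lambda x: x * x + 3 * math.sin(5 * x)
print(simulated_annealing(f, x0=4.0))
```

The occasional acceptance of worse candidates is the whole trick: early on (high T) the search explores broadly, and as T cools it converges like a greedy search.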
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We'd love to onboard you, get some feedback, and create some excitement 🎉"", 'raw': ""We'd love to onboard you, get some feedback, and create some excitement 🎉""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The steps are pretty straightforward - join the waitlist at hf.co/join/xet and we'll take care of the rest."", 'raw': ""The steps are pretty straightforward - join the waitlist at hf.co/join/xet and we'll take care of the rest.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The system is fully backward compatible, so you shouldn't notice a thing. BUT to get the best experience when uploading/downloading, make sure you have "", 'raw': ""The system is fully backward compatible, so you shouldn't notice a thing. BUT to get the best experience when uploading/downloading, make sure you have ""}, {'type': 'inline_code', 'code': 'hf_xet', 'raw': '`hf_xet`'}, {'type': 'text', 'value': ' installed alongside the latest ', 'raw': ' installed alongside the latest '}, {'type': 'inline_code', 'code': 'huggingface_hub', 'raw': '`huggingface_hub`'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What do you think?', 'raw': 'What do you think?'}]","Heyo @RichardErkhov the https://huggingface.co/xet-team at Hugging face was wondering if you wanted to join the fun and jump over to Xet storage. 🤗 + +We've been onboarding folks https://huggingface.co/blog/xet-on-the-hub know the backend can scale (Llama 4 and Qwen 3 are on Xet), is great for working with quants (see https://huggingface.co/spaces/xet-team/quantization-dedup ), and we're pushing on inviting impactful orgs and users on the Hub. You fit the bill. + +We'd love to onboard you, get some feedback, and create some excitement 🎉 + +The steps are pretty straightforward - join the waitlist at hf.co/join/xet and we'll take care of the rest. + +The system is fully backward compatible, so you shouldn't notice a thing. 
BUT to get the best experience when uploading/downloading, make sure you have `hf_xet` installed alongside the latest `huggingface_hub` + +What do you think?",[],"[{'_id': '62f8c910ebd15ad7b5afca7f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62f8c910ebd15ad7b5afca7f/rTjrwgzQtNV1f3lULgI8q.jpeg', 'fullname': 'Richard Erkhov', 'name': 'RichardErkhov', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 189}]","[{'reaction': '🔥', 'users': ['reach-vb', 'nicoboss', 'julien-c', 'John6666', 'RichardErkhov', 'pcuenq', 'doof-ferb'], 'count': 7}, {'reaction': '🤗', 'users': ['John6666', 'RichardErkhov', 'doof-ferb'], 'count': 3}, {'reaction': '🚀', 'users': ['RichardErkhov', 'doof-ferb'], 'count': 2}]",2025-05-20 23:00:42,2025-06-03 14:05:39.048,"[{'_id': '62f8c910ebd15ad7b5afca7f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62f8c910ebd15ad7b5afca7f/rTjrwgzQtNV1f3lULgI8q.jpeg', 'fullname': 'Richard Erkhov', 'name': 'RichardErkhov', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 189, 'isFollowing': False}, {'_id': '65d50e9ef9cbfa798c590004', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d50e9ef9cbfa798c590004/FlVe8chafigMfrPpMeJRL.jpeg', 'fullname': 'Jared Sulzdorf', 'name': 'jsulz', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 133, 'isFollowing': False}]",/posts/jsulz/285043040409517,2341,"{'language': 'en', 'probability': 0.9101844429969788}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png,30.0,GHOSTAI,ghostai1,152385069836833,"[{'type': 'text', 'value': '# Computer Vision societal impact: A Deep Dive', 'raw': '# Computer Vision societal impact: A Deep Dive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In the ever-evolving world of technology, one cannot deny the astonishing leap that Artificial Intelligence (AI) has made over the years. One of the most noticeable strides in this sector is the application of AI in computer vision, which has revolutionized industries across the globe. Let's delve into how AI-driven Computer Vision has steered society towards an innovative future."", 'raw': ""In the ever-evolving world of technology, one cannot deny the astonishing leap that Artificial Intelligence (AI) has made over the years. One of the most noticeable strides in this sector is the application of AI in computer vision, which has revolutionized industries across the globe. Let's delve into how AI-driven Computer Vision has steered society towards an innovative future.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""A key application of AI-driven Computer Vision is in the field of healthcare. With the help of this technology, doctors can now diagnose diseases at an earlier stage, thanks to the ability of AI to analyze medical images at a pace and accuracy that human eyes can't match. This has led to major breakthroughs in treatment, resulting in enhanced patient outcomes."", 'raw': ""A key application of AI-driven Computer Vision is in the field of healthcare. With the help of this technology, doctors can now diagnose diseases at an earlier stage, thanks to the ability of AI to analyze medical images at a pace and accuracy that human eyes can't match. 
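To make the install note in the jsulz post concrete: the Xet transfer path is picked up automatically by `huggingface_hub` whenever the `hf_xet` package is importable, so no code changes are needed. A minimal sanity check, where the repo id and filename are just examples:

```python
# Assumes: pip install -U huggingface_hub hf_xet
import importlib.util

from huggingface_hub import hf_hub_download

# hf_xet is used transparently by huggingface_hub when it is installed.
assert importlib.util.find_spec("hf_xet") is not None, "hf_xet not installed"

# Any download exercises the transfer path; repo and file are illustrative.
path = hf_hub_download(repo_id="Qwen/Qwen3-0.6B", filename="config.json")
print(path)
```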
This has led to major breakthroughs in treatment, resulting in enhanced patient outcomes.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Moreover, the retail sector has also benefitted greatly from this technology. With the integration of AI-driven Computer Vision, online shopping has become easier than ever. Customers can now order their desired products without even lifting a finger. The AI system uses computer vision to recognize and understand the items, making the shopping process more convenient.', 'raw': 'Moreover, the retail sector has also benefitted greatly from this technology. With the integration of AI-driven Computer Vision, online shopping has become easier than ever. Customers can now order their desired products without even lifting a finger. The AI system uses computer vision to recognize and understand the items, making the shopping process more convenient.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lastly, AI-driven Computer Vision has also had a profound impact on urban infrastructure. This technology enables smart cities by providing real-time data on traffic, weather, and fire alarms, among other things. This data allows city officials to make informed decisions and take necessary action to ensure public safety and efficiency.', 'raw': 'Lastly, AI-driven Computer Vision has also had a profound impact on urban infrastructure. This technology enables smart cities by providing real-time data on traffic, weather, and fire alarms, among other things. This data allows city officials to make informed decisions and take necessary action to ensure public safety and efficiency.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In conclusion, the incredible advancements in AI-driven Computer Vision have undoubtedly ushered in a new era of innovation. With its numerous applications across various sectors, this technology promises to continue steering society towards an even more advanced and connected future. So, buckle up and embrace the future of AI-driven Computer Vision, as it is here to stay.', 'raw': 'In conclusion, the incredible advancements in AI-driven Computer Vision have undoubtedly ushered in a new era of innovation. With its numerous applications across various sectors, this technology promises to continue steering society towards an even more advanced and connected future. So, buckle up and embrace the future of AI-driven Computer Vision, as it is here to stay.'}]","# Computer Vision societal impact: A Deep Dive + +In the ever-evolving world of technology, one cannot deny the astonishing leap that Artificial Intelligence (AI) has made over the years. One of the most noticeable strides in this sector is the application of AI in computer vision, which has revolutionized industries across the globe. Let's delve into how AI-driven Computer Vision has steered society towards an innovative future. + +A key application of AI-driven Computer Vision is in the field of healthcare. With the help of this technology, doctors can now diagnose diseases at an earlier stage, thanks to the ability of AI to analyze medical images at a pace and accuracy that human eyes can't match. This has led to major breakthroughs in treatment, resulting in enhanced patient outcomes. + +Moreover, the retail sector has also benefitted greatly from this technology. 
With the integration of AI-driven Computer Vision, online shopping has become easier than ever. Customers can now order their desired products without even lifting a finger. The AI system uses computer vision to recognize and understand the items, making the shopping process more convenient. + +Lastly, AI-driven Computer Vision has also had a profound impact on urban infrastructure. This technology enables smart cities by providing real-time data on traffic, weather, and fire alarms, among other things. This data allows city officials to make informed decisions and take necessary action to ensure public safety and efficiency. + +In conclusion, the incredible advancements in AI-driven Computer Vision have undoubtedly ushered in a new era of innovation. With its numerous applications across various sectors, this technology promises to continue steering society towards an even more advanced and connected future. So, buckle up and embrace the future of AI-driven Computer Vision, as it is here to stay.",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-05-20 22:54:45,2025-05-20 22:54:45.425,[],/posts/ghostai1/152385069836833,235,"{'language': 'en', 'probability': 0.9498171806335449}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,419913536892543,"[{'type': 'text', 'value': ""Got access to Google's all-new Gemini Diffusion a state-of-the-art text diffusion model. It delivers the performance of Gemini 2.0 Flash-Lite at 5x the speed, generating over 1000 tokens in a fraction of a second and producing impressive results. Below are some initial outputs generated using the model. ♊🔥"", 'raw': ""Got access to Google's all-new Gemini Diffusion a state-of-the-art text diffusion model. It delivers the performance of Gemini 2.0 Flash-Lite at 5x the speed, generating over 1000 tokens in a fraction of a second and producing impressive results. Below are some initial outputs generated using the model. ♊🔥""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Gemini Diffusion Playground ✦ : ', 'raw': 'Gemini Diffusion Playground ✦ : '}, {'type': 'link', 'href': 'https://deepmind.google.com/frontiers/gemini-diffusion', 'raw': 'https://deepmind.google.com/frontiers/gemini-diffusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Get Access Here : ', 'raw': 'Get Access Here : '}, {'type': 'link', 'href': 'https://docs.google.com/forms/d/1aLm6J13tAkq4v4qwGR3z35W2qWy7mHiiA0wGEpecooo/viewform?edit_requested=true', 'raw': 'https://docs.google.com/forms/d/1aLm6J13tAkq4v4qwGR3z35W2qWy7mHiiA0wGEpecooo/viewform?edit_requested=true'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 To know more, visit: ', 'raw': '🔗 To know more, visit: '}, {'type': 'link', 'href': 'https://deepmind.google/models/gemini-diffusion/', 'raw': 'https://deepmind.google/models/gemini-diffusion/'}]","Got access to Google's all-new Gemini Diffusion a state-of-the-art text diffusion model. It delivers the performance of Gemini 2.0 Flash-Lite at 5x the speed, generating over 1000 tokens in a fraction of a second and producing impressive results. Below are some initial outputs generated using the model. 
♊🔥 + +Gemini Diffusion Playground ✦ : https://deepmind.google.com/frontiers/gemini-diffusion + +Get Access Here : https://docs.google.com/forms/d/1aLm6J13tAkq4v4qwGR3z35W2qWy7mHiiA0wGEpecooo/viewform?edit_requested=true + +🔗 To know more, visit: https://deepmind.google/models/gemini-diffusion/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/ePuVLGwepwthdPK_uhgfB.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/3jUJ62MM9pvANqgcMxSHm.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/2n0mUZ1tJcQhLV8fQh3Pz.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/-YKIKFoUBteeqRkcIRvha.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/uq31_SCU5ykYpHj2cXc0M.png'}]",[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'John6666', 'jonkoch', 'AekDevDev', 'ashbuilds'], 'count': 5}, {'reaction': '❤️', 'users': ['prithivMLmods'], 'count': 1}]",2025-05-20 20:18:43,2025-05-20 20:34:00.317,"[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957, 'isFollowing': False}]",/posts/prithivMLmods/419913536892543,2368,"{'language': 'en', 'probability': 0.6138147711753845}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/SHPMR1CFhDyr7XFgOAG7S.png,2.0,Dillan Raynor,DillanRaynor,488813807269018,"[{'type': 'text', 'value': 'I Need Help;', 'raw': 'I Need Help;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I recently published a blog post on the Hugging Face platform from my Pro account:', 'raw': 'I recently published a blog post on the Hugging Face platform from my Pro account:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 How Enterprises Are Actually Using AI to Pull Off Localized Product Launches Without Losing Their Voice', 'raw': '🔗 How Enterprises Are Actually Using AI to Pull Off Localized Product Launches Without Losing Their Voice'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My external link: ', 'raw': 'My external link: '}, {'type': 'link', 'href': 'https://www.heygen.com/translate', 'raw': 'https://www.heygen.com/translate'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'While reviewing the post, I noticed that all external links are marked as rel=""nofollow"". However, I’ve seen other user-published blog posts (e.g., Tiny Agents) where external links are dofollow.', 'raw': 'While reviewing the post, I noticed that all external links are marked as rel=""nofollow"". 
However, I’ve seen other user-published blog posts (e.g., Tiny Agents) where external links are dofollow.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Since I’m a Pro user and have put effort into creating original, high-quality, non-promotional content that benefits the community, I kindly request you to review my post and consider updating the link attributes to dofollow — or let me know what steps I need to follow to qualify for that.', 'raw': 'Since I’m a Pro user and have put effort into creating original, high-quality, non-promotional content that benefits the community, I kindly request you to review my post and consider updating the link attributes to dofollow — or let me know what steps I need to follow to qualify for that.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If any community contributor is able to assist me or help republish the article with proper dofollow attributes, I’d be truly grateful.', 'raw': 'If any community contributor is able to assist me or help republish the article with proper dofollow attributes, I’d be truly grateful.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Looking forward to your response and really appreciate the work you do for the community. ', 'raw': 'Looking forward to your response and really appreciate the work you do for the community. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks in advance!', 'raw': 'Thanks in advance!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dillan Raynor', 'raw': 'Dillan Raynor'}]","I Need Help; + +I recently published a blog post on the Hugging Face platform from my Pro account: + +🔗 How Enterprises Are Actually Using AI to Pull Off Localized Product Launches Without Losing Their Voice + +My external link: https://www.heygen.com/translate + +While reviewing the post, I noticed that all external links are marked as rel=""nofollow"". However, I’ve seen other user-published blog posts (e.g., Tiny Agents) where external links are dofollow. + +Since I’m a Pro user and have put effort into creating original, high-quality, non-promotional content that benefits the community, I kindly request you to review my post and consider updating the link attributes to dofollow — or let me know what steps I need to follow to qualify for that. + +If any community contributor is able to assist me or help republish the article with proper dofollow attributes, I’d be truly grateful. + +Looking forward to your response and really appreciate the work you do for the community. + +Thanks in advance! 
+Dillan Raynor",[],[],"[{'reaction': '🧠', 'users': ['John6666', 'imranand', 'EX4L'], 'count': 3}]",2025-05-17 09:41:17,2025-05-19 01:07:23.138,"[{'_id': '65fa4d863cf67ad371ddde3b', 'avatarUrl': '/avatars/74058a4b27aaef9a6907612bdfc2f560.svg', 'fullname': 'moe', 'name': 'alliballi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '68245ce53febb0b649ddd362', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/SHPMR1CFhDyr7XFgOAG7S.png', 'fullname': 'Dillan Raynor', 'name': 'DillanRaynor', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '68260f875403bde6a27068a9', 'avatarUrl': '/avatars/c89a1a019e1d6e2ec1f31c51067b602a.svg', 'fullname': 'Koffi Roxane', 'name': 'Koffy23', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/DillanRaynor/488813807269018,2160,"{'language': 'en', 'probability': 0.9287950992584229}",4 +/avatars/c99bf0f3a4518cad3b1c9bc6f6053328.svg,,George Williams,sourcesync,808469197193004,"[{'type': 'text', 'value': ""Hi! I'm having an issue converting a Phi3 model to a torch scripted model. Code, error, and versions below. Any guidance would be super helpful, thanks!"", 'raw': ""Hi! I'm having an issue converting a Phi3 model to a torch scripted model. Code, error, and versions below. Any guidance would be super helpful, thanks!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here is the code:', 'raw': 'Here is the code:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'from transformers import AutoModelForCausalLM\nimport torch\n\nbase_model = ""microsoft/Phi-3-mini-4k-instruct""\n\nmodel = AutoModelForCausalLM.from_pretrained(\n base_model,\n return_dict=True,\n low_cpu_mem_usage=True,\n torch_dtype=torch.float16,\n device_map=""auto"",\n trust_remote_code=True,\n torchscript=True\n)\n\nscripted_model = torch.jit.script(model)', 'raw': '```\nfrom transformers import AutoModelForCausalLM\nimport torch\n\nbase_model = ""microsoft/Phi-3-mini-4k-instruct""\n\nmodel = AutoModelForCausalLM.from_pretrained(\n base_model,\n return_dict=True,\n low_cpu_mem_usage=True,\n torch_dtype=torch.float16,\n device_map=""auto"",\n trust_remote_code=True,\n torchscript=True\n)\n\nscripted_model = torch.jit.script(model)\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here is the error:', 'raw': 'Here is the error:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ' File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 910, in \n args = [build_expr(ctx, py_arg) for py_arg in expr.args]\n File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 417, in __call__\n return method(ctx, node)\n File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 1255, in build_GeneratorExp\n return ExprBuilder.build_ListComp(ctx, stmt)\n File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 1244, in build_ListComp\n raise NotSupportedError(r, ""Comprehension ifs are not supported 
yet"")\ntorch.jit.frontend.NotSupportedError: Comprehension ifs are not supported yet:\n File "".../.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1148\n next_cache = next_decoder_cache.to_legacy_cache() if use_legacy_cache else next_decoder_cache\n if not return_dict:\n return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None)\n return BaseModelOutputWithPast(\n last_hidden_state=hidden_states', 'raw': '```\n File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 910, in \n args = [build_expr(ctx, py_arg) for py_arg in expr.args]\n File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 417, in __call__\n return method(ctx, node)\n File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 1255, in build_GeneratorExp\n return ExprBuilder.build_ListComp(ctx, stmt)\n File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 1244, in build_ListComp\n raise NotSupportedError(r, ""Comprehension ifs are not supported yet"")\ntorch.jit.frontend.NotSupportedError: Comprehension ifs are not supported yet:\n File "".../.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1148\n next_cache = next_decoder_cache.to_legacy_cache() if use_legacy_cache else next_decoder_cache\n if not return_dict:\n return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None)\n return BaseModelOutputWithPast(\n last_hidden_state=hidden_states\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'torch==2.5.1', 'raw': 'torch==2.5.1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'transformers==4.51.3', 'raw': 'transformers==4.51.3'}]","Hi! I'm having an issue converting a Phi3 model to a torch scripted model. Code, error, and versions below. Any guidance would be super helpful, thanks! 
+ +Here is the code: + +``` +from transformers import AutoModelForCausalLM +import torch + +base_model = ""microsoft/Phi-3-mini-4k-instruct"" + +model = AutoModelForCausalLM.from_pretrained( + base_model, + return_dict=True, + low_cpu_mem_usage=True, + torch_dtype=torch.float16, + device_map=""auto"", + trust_remote_code=True, + torchscript=True +) + +scripted_model = torch.jit.script(model) +``` + +Here is the error: + +``` + File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 910, in + args = [build_expr(ctx, py_arg) for py_arg in expr.args] + File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 417, in __call__ + return method(ctx, node) + File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 1255, in build_GeneratorExp + return ExprBuilder.build_ListComp(ctx, stmt) + File "".../anaconda3/envs/qcomm_phi3.5instruct_aihub_py_310/lib/python3.10/site-packages/torch/jit/frontend.py"", line 1244, in build_ListComp + raise NotSupportedError(r, ""Comprehension ifs are not supported yet"") +torch.jit.frontend.NotSupportedError: Comprehension ifs are not supported yet: + File "".../.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1148 + next_cache = next_decoder_cache.to_legacy_cache() if use_legacy_cache else next_decoder_cache + if not return_dict: + return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states +``` + +torch==2.5.1 +transformers==4.51.3",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-05-17 02:58:57,2025-05-17 13:23:25.257,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '66be647e70f0b1935a709623', 'avatarUrl': '/avatars/c99bf0f3a4518cad3b1c9bc6f6053328.svg', 'fullname': 'George Williams', 'name': 'sourcesync', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/sourcesync/808469197193004,608,"{'language': 'en', 'probability': 0.5120981931686401}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,340514834154581,"[{'type': 'text', 'value': '🌟 Introducing Ilúvatar: Creative Design & Invention AI 🌟', 'raw': '🌟 Introducing Ilúvatar: Creative Design & Invention AI 🌟'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link: ', 'raw': 'Link: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginipick/IDEA-DESIGN'}, 'url': 'https://huggingface.co/spaces/ginipick/IDEA-DESIGN', 'raw': 'https://huggingface.co/spaces/ginipick/IDEA-DESIGN'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hello, AI creators! 👋 ', 'raw': 'Hello, AI creators! 
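One commonly suggested workaround for the `torch.jit.script` failure above is `torch.jit.trace`: tracing records the operations actually executed rather than parsing the model's Python source, so the unsupported comprehension-if in `modeling_phi3.py` is never seen by the TorchScript frontend. The sketch below is a best-effort suggestion, not a verified fix; tracing freezes control flow and tensor shapes, and the KV cache is disabled here because dynamic cache objects generally do not trace.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

base_model = "microsoft/Phi-3-mini-4k-instruct"

tokenizer = AutoTokenizer.from_pretrained(base_model, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    base_model,
    torch_dtype=torch.float32,   # fp32 for CPU tracing; use fp16 + .to("cuda") on GPU
    trust_remote_code=True,
    torchscript=True,            # forward() returns tuples, which tracing requires
)
model.config.use_cache = False   # dynamic KV-cache objects typically break tracing
model.eval()

inputs = tokenizer("Hello, world", return_tensors="pt")
with torch.no_grad():
    traced = torch.jit.trace(
        model,
        (inputs["input_ids"], inputs["attention_mask"]),
        strict=False,            # outputs are tuples/dicts of varying arity
    )
traced.save("phi3_traced.pt")
```

Note that a traced module only replays the path taken for the example inputs; for variable-length generation you would typically re-trace per shape or stay in eager mode.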
👋 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Today I'm introducing Ilúvatar, an amazing tool that automatically generates innovative design and invention ideas."", 'raw': ""Today I'm introducing Ilúvatar, an amazing tool that automatically generates innovative design and invention ideas.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key Features', 'raw': '✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 AI-Powered Idea Generation: Creates detailed design/invention ideas from simple prompts', 'raw': '🧠 AI-Powered Idea Generation: Creates detailed design/invention ideas from simple prompts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Web Search Integration: Incorporates real-time information to reflect latest trends', 'raw': '🔍 Web Search Integration: Incorporates real-time information to reflect latest trends'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Kaggle Dataset Analysis: Provides data-driven insights', 'raw': '📊 Kaggle Dataset Analysis: Provides data-driven insights'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖼️ Automatic Image Generation: Creates image prompts visualizing your ideas', 'raw': '🖼️ Automatic Image Generation: Creates image prompts visualizing your ideas'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 File Upload Support: Analyzes reference materials (text, CSV, PDF)', 'raw': '📁 File Upload Support: Analyzes reference materials (text, CSV, PDF)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""📈 Business Frameworks: Includes SWOT, Porter's 5 Forces, BCG Matrix analyses"", 'raw': ""📈 Business Frameworks: Includes SWOT, Porter's 5 Forces, BCG Matrix analyses""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌏 Multilingual Support: Available in both English and Korean', 'raw': '🌏 Multilingual Support: Available in both English and Korean'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Perfect For', 'raw': '🎯 Perfect For'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💼 Product Designers/Developers: When you need fresh product concepts', 'raw': '💼 Product Designers/Developers: When you need fresh product concepts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔬 Researchers/Inventors: When you need innovative idea inspiration', 'raw': '🔬 Researchers/Inventors: When you need innovative idea inspiration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Planners/Marketers: When you need differentiated business strategies', 'raw': '📝 Planners/Marketers: When you need differentiated business strategies'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎓 Students/Educators: For creative thinking and problem-solving education', 'raw': '🎓 Students/Educators: For creative thinking and problem-solving education'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Start Creating Now!', 'raw': '🚀 Start Creating Now!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Utilizing 24 categories and ~1,100 items as design SEEDS, the system generates combinations across 2-6 depth levels, creating up to 1,100 trillion design variables. 
A ""water-air transitional device"" might combine structural self-reorganization, material transformation, biomimetic movement, and propulsion optimization.', 'raw': 'Utilizing 24 categories and ~1,100 items as design SEEDS, the system generates combinations across 2-6 depth levels, creating up to 1,100 trillion design variables. A ""water-air transitional device"" might combine structural self-reorganization, material transformation, biomimetic movement, and propulsion optimization.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The LLM analyzes correlations between user queries and design combinations, identifying innovative elements like hybrid propulsion systems inspired by nature.', 'raw': 'The LLM analyzes correlations between user queries and design combinations, identifying innovative elements like hybrid propulsion systems inspired by nature.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'By integrating data from Kaggle datasets, web searches, and research, the system prioritizes groundbreaking combinations such as ""graphene morphing wings + AI fluid dynamics + quantum dot solar cells"" with feasibility assessments.', 'raw': 'By integrating data from Kaggle datasets, web searches, and research, the system prioritizes groundbreaking combinations such as ""graphene morphing wings + AI fluid dynamics + quantum dot solar cells"" with feasibility assessments.'}]","🌟 Introducing Ilúvatar: Creative Design & Invention AI 🌟 + +Link: https://huggingface.co/spaces/ginipick/IDEA-DESIGN + +Hello, AI creators! 👋 +Today I'm introducing Ilúvatar, an amazing tool that automatically generates innovative design and invention ideas. + +✨ Key Features + +🧠 AI-Powered Idea Generation: Creates detailed design/invention ideas from simple prompts +🔍 Web Search Integration: Incorporates real-time information to reflect latest trends +📊 Kaggle Dataset Analysis: Provides data-driven insights +🖼️ Automatic Image Generation: Creates image prompts visualizing your ideas +📁 File Upload Support: Analyzes reference materials (text, CSV, PDF) +📈 Business Frameworks: Includes SWOT, Porter's 5 Forces, BCG Matrix analyses +🌏 Multilingual Support: Available in both English and Korean + +🎯 Perfect For + +💼 Product Designers/Developers: When you need fresh product concepts +🔬 Researchers/Inventors: When you need innovative idea inspiration +📝 Planners/Marketers: When you need differentiated business strategies +🎓 Students/Educators: For creative thinking and problem-solving education + +🚀 Start Creating Now! +Utilizing 24 categories and ~1,100 items as design SEEDS, the system generates combinations across 2-6 depth levels, creating up to 1,100 trillion design variables. A ""water-air transitional device"" might combine structural self-reorganization, material transformation, biomimetic movement, and propulsion optimization. +The LLM analyzes correlations between user queries and design combinations, identifying innovative elements like hybrid propulsion systems inspired by nature. 
+By integrating data from Kaggle datasets, web searches, and research, the system prioritizes groundbreaking combinations such as ""graphene morphing wings + AI fluid dynamics + quantum dot solar cells"" with feasibility assessments.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/lHSAwY43dmmXdyRlzK4cj.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/HBDD3yGgRdSGPIRuVUt1P.mp4'}]",[],"[{'reaction': '🔥', 'users': ['ginipick', 'seawolf2357', 'hackstone', 'chrissamt', 'soulbariansz', 'nqzfaizal77ai', 'immunobiotech', 'chalspugger97', 'John6666', 'yokoha', 'openfree', 'LowFrameHighLag', 'japojapa', 'script64b', 'viidfedenwad', 'abdelazizEl7or', 'itpasotm', 'byteprobe', 'timeparadoxs', 'marzwillson'], 'count': 20}, {'reaction': '🚀', 'users': ['ginipick', 'seawolf2357', 'yokoha', 'script64b', 'openfree', 'viidfedenwad', 'itpasotm'], 'count': 7}, {'reaction': '👀', 'users': ['ginipick', 'seawolf2357', 'openfree', 'viidfedenwad', 'itpasotm'], 'count': 5}, {'reaction': '🤝', 'users': ['ginipick', 'seawolf2357', 'openfree', 'itpasotm'], 'count': 4}, {'reaction': '❤️', 'users': ['ginipick', 'openfree', 'itpasotm'], 'count': 3}, {'reaction': '👍', 'users': ['ginipick', 'openfree', 'itpasotm'], 'count': 3}, {'reaction': '🤗', 'users': ['ginipick', 'openfree', 'itpasotm'], 'count': 3}]",2025-05-16 20:52:58,2025-05-17 04:55:58.419,[],/posts/ginipick/340514834154581,3372,"{'language': 'en', 'probability': 0.8147211074829102}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/658a4c914bb41498f7d5e3ca/zMJjxfazi9ePc7GZ1jRAE.jpeg,66.0,Pro Creations,ProCreations,912570752513965,"[{'type': 'text', 'value': 'I made a space!', 'raw': 'I made a space!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out ', 'raw': 'Check out '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/ProCreations/realtime-ai-visualization', 'raw': 'https://huggingface.co/spaces/ProCreations/realtime-ai-visualization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This cool space visualizes a real neural net in real time. It trains a real 199 parameter model on XOR. With baby mode for non-devs and advanced mode for developers or enthusiasts, (hopefully) everyone will understand!', 'raw': 'This cool space visualizes a real neural net in real time. It trains a real 199 parameter model on XOR. With baby mode for non-devs and advanced mode for developers or enthusiasts, (hopefully) everyone will understand!'}]","I made a space! + +Check out +https://huggingface.co/spaces/ProCreations/realtime-ai-visualization +This cool space visualizes a real neural net in real time. It trains a real 199 parameter model on XOR. 
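A quick back-of-the-envelope check on the combinatorial claim in the ginipick post: unordered combinations of ~1,100 SEED items at depths 2 through 6 already land in the quadrillions, so "trillions of design variables" is the right order of magnitude (the exact figure quoted presumably reflects category constraints the post does not spell out).

```python
from math import comb

items = 1100
per_depth = {k: comb(items, k) for k in range(2, 7)}  # depth 2..6
total = sum(per_depth.values())
print(f"depth 6 alone: {per_depth[6]:.2e}")  # ~2.4e15; depth 6 dominates
print(f"total (depth 2-6): {total:.2e}")
```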
With baby mode for non-devs and advanced mode for developers or enthusiasts, (hopefully) everyone will understand!",[],[],"[{'reaction': '👀', 'users': ['japojapa', 'John6666', 'nomadicsynth'], 'count': 3}]",2025-05-16 16:30:09,2025-05-16 16:30:09.037,[],/posts/ProCreations/912570752513965,1906,"{'language': 'en', 'probability': 0.7487531304359436}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,339574377290946,"[{'type': 'text', 'value': '2 cool papers from Alibaba Qwen team featured in today’s Daily Papers🔥', 'raw': '2 cool papers from Alibaba Qwen team featured in today’s Daily Papers🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ WorldPM: Scaling Human Preference Modeling', 'raw': '✨ WorldPM: Scaling Human Preference Modeling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2505.10527'}, 'url': 'https://huggingface.co/papers/2505.10527', 'raw': 'https://huggingface.co/papers/2505.10527', 'label': 'WorldPM: Scaling Human Preference Modeling (2505.10527)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Parallel Scaling Law for Language Models', 'raw': '✨ Parallel Scaling Law for Language Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2505.10475'}, 'url': 'https://huggingface.co/papers/2505.10475', 'raw': 'https://huggingface.co/papers/2505.10475', 'label': 'Parallel Scaling Law for Language Models (2505.10475)'}]","2 cool papers from Alibaba Qwen team featured in today’s Daily Papers🔥 + +✨ WorldPM: Scaling Human Preference Modeling +https://huggingface.co/papers/2505.10527 +✨ Parallel Scaling Law for Language Models +https://huggingface.co/papers/2505.10475","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/0tBXBHVb-UzpueI5R1TdP.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'abdelazizEl7or'], 'count': 2}, {'reaction': '🤗', 'users': ['eaddario', 'JohnRoger'], 'count': 2}]",2025-05-16 14:39:31,2025-05-16 14:39:31.416,[],/posts/AdinaY/339574377290946,1941,"{'language': 'en', 'probability': 0.70326167345047}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1617550879179-noauth.jpeg,163.0,Charles Bensimon,cbensimon,356529804559377,"[{'type': 'text', 'value': '🚀 ZeroGPU ', 'raw': '🚀 ZeroGPU '}, {'type': 'inline_code', 'code': 'medium', 'raw': '`medium`'}, {'type': 'text', 'value': ' size is now available as a power-user feature', 'raw': ' size is now available as a power-user feature'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Nothing too fancy for now—ZeroGPU Spaces still default to ', 'raw': 'Nothing too fancy for now—ZeroGPU Spaces still default to '}, {'type': 'inline_code', 'code': 'large', 'raw': '`large`'}, {'type': 'text', 'value': ' (70GB VRAM)—but this paves the way for:', 'raw': ' (70GB VRAM)—but this paves the way for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 💰 size-based quotas / pricing (', 'raw': '- 💰 size-based quotas / pricing ('}, {'type': 'inline_code', 'code': 'medium', 'raw': '`medium`'}, {'type': 'text', 'value': ' will offer significantly more usage than ', 'raw': ' will offer significantly more usage than '}, {'type': 'inline_code', 'code': 'large', 
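On the ProCreations post above: "199 parameters" is a very specific count, and it is worth seeing how a tiny XOR MLP gets there. The layer widths below are purely hypothetical, one of several 2-input architectures whose weights and biases sum to exactly 199, and not necessarily what the Space actually uses.

```python
import torch.nn as nn

# Hypothetical 2 -> 10 -> 14 -> 1 MLP:
# (2*10 + 10) + (10*14 + 14) + (14*1 + 1) = 30 + 154 + 15 = 199
model = nn.Sequential(
    nn.Linear(2, 10), nn.Tanh(),
    nn.Linear(10, 14), nn.Tanh(),
    nn.Linear(14, 1), nn.Sigmoid(),
)
print(sum(p.numel() for p in model.parameters()))  # 199
```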
'raw': '`large`'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🦣 the upcoming ', 'raw': '- 🦣 the upcoming '}, {'type': 'inline_code', 'code': 'xlarge', 'raw': '`xlarge`'}, {'type': 'text', 'value': ' size (141GB VRAM)', 'raw': ' size (141GB VRAM)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can as of now control GPU size via a Space variable. Accepted values:', 'raw': 'You can as of now control GPU size via a Space variable. Accepted values:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'inline_code', 'code': 'auto', 'raw': '`auto`'}, {'type': 'text', 'value': ' (future default)', 'raw': ' (future default)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'inline_code', 'code': 'medium', 'raw': '`medium`'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'inline_code', 'code': 'large', 'raw': '`large`'}, {'type': 'text', 'value': ' (current default)', 'raw': ' (current default)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The auto mode checks total CUDA tensor size during startup:', 'raw': 'The auto mode checks total CUDA tensor size during startup:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- More than 30GB → ', 'raw': '- More than 30GB → '}, {'type': 'inline_code', 'code': 'large', 'raw': '`large`'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Otherwise → ', 'raw': '- Otherwise → '}, {'type': 'inline_code', 'code': 'medium', 'raw': '`medium`'}]","🚀 ZeroGPU `medium` size is now available as a power-user feature + +Nothing too fancy for now—ZeroGPU Spaces still default to `large` (70GB VRAM)—but this paves the way for: +- 💰 size-based quotas / pricing (`medium` will offer significantly more usage than `large`) +- 🦣 the upcoming `xlarge` size (141GB VRAM) + +You can as of now control GPU size via a Space variable. 
Accepted values: +- `auto` (future default) +- `medium` +- `large` (current default) + +The auto mode checks total CUDA tensor size during startup: +- More than 30GB → `large` +- Otherwise → `medium`","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6069de23a0e75b0dd0135620/X-2xxL78XP3RO_TYTbYoA.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['julien-c', 'victor', 'AdinaY', 'davanstrien', 'sergiopaniego', 'Nymbo', 'hysts', '9voltfan2009', 'merve', 'KingNish', 'YaTharThShaRma999', 'fakezeta', 'alsate', 'John6666', 'LowFrameHighLag', 'prithivMLmods', 'pinakinathc', 'nyuuzyou', 'viidfedenwad', 'abdelazizEl7or', 'Whoaa57', 'byteprobe', 'kvnbbg', 'gonegirl', 'meganariley', 'radames'], 'count': 26}, {'reaction': '👀', 'users': ['Aurelien-Morgan', 'silvaa09', 'meganariley', 'radames', 'cloudsamurai'], 'count': 5}, {'reaction': '🚀', 'users': ['John6666', 'gonegirl', 'meganariley', 'radames'], 'count': 4}, {'reaction': '🤗', 'users': ['John6666', 'eaddario', 'meganariley', 'radames'], 'count': 4}, {'reaction': '👍', 'users': ['fffiloni', 'meganariley', 'Moibe', 'radames'], 'count': 4}]",2025-05-16 14:34:32,2025-05-18 04:32:01.452,"[{'_id': '6069de23a0e75b0dd0135620', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1617550879179-noauth.jpeg', 'fullname': 'Charles Bensimon', 'name': 'cbensimon', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 163, 'isFollowing': False}, {'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}, {'_id': '68180c15ec617dec1ad0bbc1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/pHm0piLsgEO-_tDHUlXyl.jpeg', 'fullname': 'Joshua Jeremiah Erut-Parry', 'name': 'JoshuaSmart', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/cbensimon/356529804559377,5827,"{'language': 'en', 'probability': 0.7198306322097778}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/61c141342aac764ce1654e43/81AwoT5IQ_Xdw0OVw7TKu.jpeg,3482.0,Loubna Ben Allal,loubnabnl,367240842210951,"[{'type': 'text', 'value': 'SmolVLM is now available on PocketPal — you can run it offline on your smartphone to interpret the world around you. 🌍📱', 'raw': 'SmolVLM is now available on PocketPal — you can run it offline on your smartphone to interpret the world around you. 🌍📱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And check out this real-time camera demo by ', 'raw': 'And check out this real-time camera demo by '}, {'type': 'mention', 'user': 'ngxson', 'raw': '@ngxson'}, {'type': 'text', 'value': ', powered by llama.cpp:', 'raw': ', powered by llama.cpp:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/ngxson/smolvlm-realtime-webcam', 'raw': 'https://github.com/ngxson/smolvlm-realtime-webcam'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://x.com/pocketpal_ai', 'raw': 'https://x.com/pocketpal_ai'}]","SmolVLM is now available on PocketPal — you can run it offline on your smartphone to interpret the world around you. 
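The `auto` heuristic cbensimon describes (sum the CUDA tensors resident at startup, then pick a tier) can be sketched in a few lines. This is a hypothetical re-implementation for illustration, not ZeroGPU's actual code:

```python
import gc
import torch

def auto_gpu_size(threshold_gb: float = 30.0) -> str:
    """Tally bytes held by live CUDA tensors; more than 30 GB maps to 'large'."""
    total = 0
    for obj in gc.get_objects():
        try:
            if isinstance(obj, torch.Tensor) and obj.is_cuda:
                total += obj.numel() * obj.element_size()
        except Exception:
            pass  # some gc-tracked objects raise on attribute access
    return "large" if total > threshold_gb * 1024**3 else "medium"
```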
🌍📱 + +And check out this real-time camera demo by @ngxson, powered by llama.cpp: +https://github.com/ngxson/smolvlm-realtime-webcam +https://x.com/pocketpal_ai","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61c141342aac764ce1654e43/F5lX0pWYAw2yYRhitsEch.png'}]","[{'_id': '63ca214abedad7e2bf1d1517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1674191139776-noauth.png', 'fullname': 'Xuan-Son Nguyen', 'name': 'ngxson', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 257}]","[{'reaction': '❤️', 'users': ['ngxson', 'John6666', 'mku64', 'FM-1976', 'BoltMonkey', 'linoyts', 'abdelazizEl7or', 'byteprobe', 'akashjss', 'SaylorTwift', 's3nh'], 'count': 11}, {'reaction': '😎', 'users': ['ngxson'], 'count': 1}]",2025-05-16 13:51:58,2025-05-22 07:21:08.205,"[{'_id': '651657087f8b9fc0f7ac15e9', 'avatarUrl': '/avatars/e45a2bafed23535a7f0932a2ad4a7a02.svg', 'fullname': 'Akash Gupta', 'name': 'akashjss', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '61c141342aac764ce1654e43', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61c141342aac764ce1654e43/81AwoT5IQ_Xdw0OVw7TKu.jpeg', 'fullname': 'Loubna Ben Allal', 'name': 'loubnabnl', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3482, 'isFollowing': False}]",/posts/loubnabnl/367240842210951,2918,"{'language': 'en', 'probability': 0.8489851355552673}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,547591602422107,"[{'type': 'text', 'value': 'New sota open-source depth estimation: Marigold v1-1 🌼', 'raw': 'New sota open-source depth estimation: Marigold v1-1 🌼'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> normal maps, depth maps of scenes & faces ', 'raw': '> normal maps, depth maps of scenes & faces '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prs-eth/marigold-normals'}, 'url': 'https://huggingface.co/spaces/prs-eth/marigold-normals', 'raw': 'https://huggingface.co/spaces/prs-eth/marigold-normals'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prs-eth/marigold'}, 'url': 'https://huggingface.co/spaces/prs-eth/marigold', 'raw': 'https://huggingface.co/spaces/prs-eth/marigold'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> get albedo (true color) and BRDF (texture) maps of scenes ', 'raw': '> get albedo (true color) and BRDF (texture) maps of scenes '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prs-eth/marigold-intrinsics'}, 'url': 'https://huggingface.co/spaces/prs-eth/marigold-intrinsics', 'raw': 'https://huggingface.co/spaces/prs-eth/marigold-intrinsics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> they even release a depth-to-3D printer format demo 😮 ', 'raw': '> they even release a depth-to-3D printer format demo 😮 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prs-eth/depth-to-3d-print'}, 'url': 'https://huggingface.co/spaces/prs-eth/depth-to-3d-print', 'raw': 'https://huggingface.co/spaces/prs-eth/depth-to-3d-print'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 
'text', 'value': 'All models are here ', 'raw': 'All models are here '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prs-eth/marigold-computer-vision-6669e9e3d3ee30f48214b9ba'}, 'url': 'https://huggingface.co/collections/prs-eth/marigold-computer-vision-6669e9e3d3ee30f48214b9ba', 'raw': 'https://huggingface.co/collections/prs-eth/marigold-computer-vision-6669e9e3d3ee30f48214b9ba'}, {'type': 'new_line', 'raw': '\n'}]","New sota open-source depth estimation: Marigold v1-1 🌼 + +> normal maps, depth maps of scenes & faces https://huggingface.co/spaces/prs-eth/marigold-normals https://huggingface.co/spaces/prs-eth/marigold +> get albedo (true color) and BRDF (texture) maps of scenes https://huggingface.co/spaces/prs-eth/marigold-intrinsics +> they even release a depth-to-3D printer format demo 😮 https://huggingface.co/spaces/prs-eth/depth-to-3d-print + +All models are here https://huggingface.co/collections/prs-eth/marigold-computer-vision-6669e9e3d3ee30f48214b9ba +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/r-fZZVe3bd2tWb-FfCNyL.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'loubnabnl', 'Agung1453', 'rifqimaruf', 'abdelazizEl7or', 'byteprobe'], 'count': 6}, {'reaction': '🤝', 'users': ['Agung1453'], 'count': 1}, {'reaction': '👍', 'users': ['Agung1453'], 'count': 1}, {'reaction': '🤗', 'users': ['Agung1453'], 'count': 1}]",2025-05-16 10:07:59,2025-05-16 10:07:59.840,[],/posts/merve/547591602422107,2284,"{'language': 'en', 'probability': 0.652411699295044}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png,3314.0,ben burtenshaw,burtenshaw,628550559998121,"[{'type': 'text', 'value': ""We're thrilled to announce the launch of our comprehensive Model Context Protocol (MCP) Course! This free program is designed to take learners from foundational understanding to practical application of MCP in AI."", 'raw': ""We're thrilled to announce the launch of our comprehensive Model Context Protocol (MCP) Course! 
This free program is designed to take learners from foundational understanding to practical application of MCP in AI.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow the course on the hub: ', 'raw': 'Follow the course on the hub: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'mcp-course'}, 'url': 'https://huggingface.co/mcp-course', 'raw': 'https://huggingface.co/mcp-course', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/itgTDqMrnvgNfJZJ4YmCt.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this course, you will:', 'raw': 'In this course, you will:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 Study Model Context Protocol in theory, design, and practice.', 'raw': '📖 Study Model Context Protocol in theory, design, and practice.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧑\u200d💻 Learn to use established MCP SDKs and frameworks.', 'raw': '🧑\u200d💻 Learn to use established MCP SDKs and frameworks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💾 Share your projects and explore applications created by the community.', 'raw': '💾 Share your projects and explore applications created by the community.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏆 Participate in challenges and evaluate your MCP implementations.', 'raw': '🏆 Participate in challenges and evaluate your MCP implementations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎓 Earn a certificate of completion.', 'raw': '🎓 Earn a certificate of completion.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""At the end of this course, you'll understand how MCP works and how to build your own AI applications that leverage external data and tools using the latest MCP standards."", 'raw': ""At the end of this course, you'll understand how MCP works and how to build your own AI applications that leverage external data and tools using the latest MCP standards.""}]","We're thrilled to announce the launch of our comprehensive Model Context Protocol (MCP) Course! This free program is designed to take learners from foundational understanding to practical application of MCP in AI. + +Follow the course on the hub: https://huggingface.co/mcp-course + +In this course, you will: +📖 Study Model Context Protocol in theory, design, and practice. +🧑‍💻 Learn to use established MCP SDKs and frameworks. +💾 Share your projects and explore applications created by the community. +🏆 Participate in challenges and evaluate your MCP implementations. +🎓 Earn a certificate of completion. 
+ +At the end of this course, you'll understand how MCP works and how to build your own AI applications that leverage external data and tools using the latest MCP standards.",[],[],"[{'reaction': '🚀', 'users': ['muzaqi69', 'Sridatta7', 'user1000x', 'John6666', 'Ameeeee', 'prithivMLmods', 'alsate', 'KingNish', 'skmh', 'wizdatcodes', 'abdelazizEl7or', 'byteprobe', 'Goodnight7'], 'count': 13}]",2025-05-16 07:46:36,2025-05-17 09:44:29.538,"[{'_id': '65ae0a903b9e1f0f30d8eebc', 'avatarUrl': '/avatars/395d40b7c46e548ed3962a914f7a6694.svg', 'fullname': 'GenAICoder', 'name': 'GenAICoder', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9, 'isFollowing': False}]",/posts/burtenshaw/628550559998121,3224,"{'language': 'en', 'probability': 0.9048447608947754}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1606406298765-noauth.jpeg,368.0,Albert Villanova del Moral,albertvillanova,859296817152556,"[{'type': 'text', 'value': 'New in smolagents v1.16.0:', 'raw': 'New in smolagents v1.16.0:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Bing support in WebSearchTool', 'raw': '🔍 Bing support in WebSearchTool'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐍 Custom functions & executor_kwargs in LocalPythonExecutor', 'raw': '🐍 Custom functions & executor_kwargs in LocalPythonExecutor'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔧 Streaming GradioUI fixes', 'raw': '🔧 Streaming GradioUI fixes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Local web agents via api_base & api_key', 'raw': '🌐 Local web agents via api_base & api_key'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 Better docs', 'raw': '📚 Better docs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://github.com/huggingface/smolagents/releases/tag/v1.16.0', 'raw': 'https://github.com/huggingface/smolagents/releases/tag/v1.16.0'}]","New in smolagents v1.16.0: +🔍 Bing support in WebSearchTool +🐍 Custom functions & executor_kwargs in LocalPythonExecutor +🔧 Streaming GradioUI fixes +🌐 Local web agents via api_base & api_key +📚 Better docs + +👉 https://github.com/huggingface/smolagents/releases/tag/v1.16.0",[],[],"[{'reaction': '🤗', 'users': ['John6666', 'Ameeeee', 'Nymbo', 'KingNish', 'abdelazizEl7or', 'byteprobe', 'm-ric'], 'count': 7}, {'reaction': '🔥', 'users': ['edwardgreen', 'loubnabnl', 'Nymbo', 'KingNish', 'm-ric'], 'count': 5}, {'reaction': '😎', 'users': ['John6666', 'Nymbo'], 'count': 2}]",2025-05-16 07:06:25,2025-05-16 07:06:25.895,[],/posts/albertvillanova/859296817152556,2484,"{'language': 'en', 'probability': 0.5835049152374268}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,869555651580897,"[{'type': 'text', 'value': 'Transfer Any Clothing Into A New Person & Turn Any Person Into A 3D Figure - ComfyUI Tutorial', 'raw': 'Transfer Any Clothing Into A New Person & Turn Any Person Into A 3D Figure - ComfyUI Tutorial'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ComfyUI is super hard to use but I have literally prepared a 1-click way to install and use 2 amazing workflows. The first workflow generates a person wearing any clothing. 
The second workflow turns any person image into a 3D toy-like figure image.', 'raw': 'ComfyUI is super hard to use but I have literally prepared a 1-click way to install and use 2 amazing workflows. The first workflow generates a person wearing any clothing. The second workflow turns any person image into a 3D toy-like figure image.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tutorial Link: ', 'raw': 'Tutorial Link: '}, {'type': 'link', 'href': 'https://youtu.be/ZzYnhKeaJBs', 'raw': 'https://youtu.be/ZzYnhKeaJBs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Video Chapters', 'raw': 'Video Chapters'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:00:00 Intro: Two One-Click ComfyUI Workflows (Clothing Gen & 3D Figure)', 'raw': '0:00:00 Intro: Two One-Click ComfyUI Workflows (Clothing Gen & 3D Figure)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:00:34 Effort & Goal: Easy Installation & Use of Complex Workflows', 'raw': '0:00:34 Effort & Goal: Easy Installation & Use of Complex Workflows'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:00:49 Setup Part 1: ComfyUI Prerequisite & Downloading Project Zip File', 'raw': '0:00:49 Setup Part 1: ComfyUI Prerequisite & Downloading Project Zip File'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""0:01:06 Setup Part 2: Extracting Zip into ComfyUI Folder (WinRAR 'Extract Here' Tip)"", 'raw': ""0:01:06 Setup Part 2: Extracting Zip into ComfyUI Folder (WinRAR 'Extract Here' Tip)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:01:18 Setup Part 3: Running update_comfyui.bat for Latest ComfyUI Version', 'raw': '0:01:18 Setup Part 3: Running update_comfyui.bat for Latest ComfyUI Version'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:01:37 Setup Part 4: Running install_clothing_and_3D.bat (Installs Nodes & Requirements)', 'raw': '0:01:37 Setup Part 4: Running install_clothing_and_3D.bat (Installs Nodes & Requirements)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:02:03 Model Downloads: Intro to Swarm UI Auto-Installer & Automatic Updater', 'raw': '0:02:03 Model Downloads: Intro to Swarm UI Auto-Installer & Automatic Updater'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:02:28 Using Swarm UI: Launching Windows_start_download_models_app.bat', 'raw': '0:02:28 Using Swarm UI: Launching Windows_start_download_models_app.bat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:02:51 Model Selection in Swarm UI: Flux Dev GGUF Q8 & Low VRAM Options', 'raw': '0:02:51 Model Selection in Swarm UI: Flux Dev GGUF Q8 & Low VRAM Options'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:03:05 Configuring Model Download Path: Pointing to Your ComfyUI/models Folder', 'raw': '0:03:05 Configuring Model Download Path: Pointing to Your ComfyUI/models Folder'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:03:22 Downloading Flux Model: GGUF Quality Levels Explained (Q8, Q6, Q5, Q4, KM, KS)', 'raw': '0:03:22 Downloading Flux Model: GGUF Quality Levels Explained (Q8, Q6, Q5, Q4, KM, KS)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""0:04:10 Downloading Workflow Bundle: 'Clothing Migration Workflow Bundle' for All Models"", 'raw': ""0:04:10 Downloading Workflow Bundle: 'Clothing Migration Workflow Bundle' for All Models""}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'text', 'value': '0:04:38 Starting ComfyUI: Using Windows_run_GPU.bat & Optional .bat File Customization', 'raw': '0:04:38 Starting ComfyUI: Using Windows_run_GPU.bat & Optional .bat File Customization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:05:16 Workflow 1 (Clothing): Loading via Drag & Drop, Selecting Input Garment Image', 'raw': '0:05:16 Workflow 1 (Clothing): Loading via Drag & Drop, Selecting Input Garment Image'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:05:33 Workflow 1 (Clothing) Params: Crafting the Main Generation Prompt & Adding Extra Text', 'raw': '0:05:33 Workflow 1 (Clothing) Params: Crafting the Main Generation Prompt & Adding Extra Text'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:06:02 Workflow 1 (Clothing) Params: GPU-Dependent Model Loader (GGUF Q8 vs Full Precision FP16)', 'raw': '0:06:02 Workflow 1 (Clothing) Params: GPU-Dependent Model Loader (GGUF Q8 vs Full Precision FP16)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '0:06:22 Workflow 1 (Clothing) Low VRAM: Block Swapping with FP16 Flux Dev Model (UNET Loader)', 'raw': '0:06:22 Workflow 1 (Clothing) Low VRAM: Block Swapping with FP16 Flux Dev Model (UNET Loader)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}]","Transfer Any Clothing Into A New Person & Turn Any Person Into A 3D Figure - ComfyUI Tutorial + +ComfyUI is super hard to use but I have literally prepared a 1-click way to install and use 2 amazing workflows. The first workflow generates a person wearing any clothing. The second workflow turns any person image into a 3D toy-like figure image. + +Tutorial Link: https://youtu.be/ZzYnhKeaJBs + +Video Chapters +0:00:00 Intro: Two One-Click ComfyUI Workflows (Clothing Gen & 3D Figure) +0:00:34 Effort & Goal: Easy Installation & Use of Complex Workflows +0:00:49 Setup Part 1: ComfyUI Prerequisite & Downloading Project Zip File +0:01:06 Setup Part 2: Extracting Zip into ComfyUI Folder (WinRAR 'Extract Here' Tip) +0:01:18 Setup Part 3: Running update_comfyui.bat for Latest ComfyUI Version +0:01:37 Setup Part 4: Running install_clothing_and_3D.bat (Installs Nodes & Requirements) +0:02:03 Model Downloads: Intro to Swarm UI Auto-Installer & Automatic Updater +0:02:28 Using Swarm UI: Launching Windows_start_download_models_app.bat +0:02:51 Model Selection in Swarm UI: Flux Dev GGUF Q8 & Low VRAM Options +0:03:05 Configuring Model Download Path: Pointing to Your ComfyUI/models Folder +0:03:22 Downloading Flux Model: GGUF Quality Levels Explained (Q8, Q6, Q5, Q4, KM, KS) +0:04:10 Downloading Workflow Bundle: 'Clothing Migration Workflow Bundle' for All Models +0:04:38 Starting ComfyUI: Using Windows_run_GPU.bat & Optional .bat File Customization +0:05:16 Workflow 1 (Clothing): Loading via Drag & Drop, Selecting Input Garment Image +0:05:33 Workflow 1 (Clothing) Params: Crafting the Main Generation Prompt & Adding Extra Text +0:06:02 Workflow 1 (Clothing) Params: GPU-Dependent Model Loader (GGUF Q8 vs Full Precision FP16) +0:06:22 Workflow 1 (Clothing) Low VRAM: Block Swapping with FP16 Flux Dev Model (UNET Loader) +. +. +. 
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/CLSETQ0dlkxaODpHFNoGG.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/AI2rvk-Td2Hzd5ZHRfYDg.png'}]",[],"[{'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666', 'waynewu98', 'Edem1122', 'Moibe', 'djuna', 'siyihu'], 'count': 7}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'priyesh8201', 'siyihu'], 'count': 3}, {'reaction': '🔥', 'users': ['MonsterMMORPG', 'TahirC'], 'count': 2}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'kim-Ya'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-05-13 00:54:14,2025-05-13 00:54:25.546,[],/posts/MonsterMMORPG/869555651580897,2768,"{'language': 'en', 'probability': 0.547936201095581}",0 +/avatars/473890454810cc938bd24597e76637ce.svg,22.0,Cody Steinmetz,codys12,705081891087680,"[{'type': 'text', 'value': 'Introducing bitnet-r1-llama-8b and bitnet-r1-qwen-32b preview! These models are the first successful sub 1-billion-token finetune to BitNet architecture. We discovered that by adding an aditional input RMSNorm to each linear, you can finetune directly to BitNet with fast convergence to original model performance!', 'raw': 'Introducing bitnet-r1-llama-8b and bitnet-r1-qwen-32b preview! These models are the first successful sub 1-billion-token finetune to BitNet architecture. 
We discovered that by adding an additional input RMSNorm to each linear, you can finetune directly to BitNet with fast convergence to original model performance!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We are working on a pull request to use this extra RMS for any model.', 'raw': 'We are working on a pull request to use this extra RMS for any model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To test these models now, install this fork of transformers:', 'raw': 'To test these models now, install this fork of transformers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'pip install git+https://github.com/Codys12/transformers.git', 'raw': '```\npip install git+https://github.com/Codys12/transformers.git\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then load the models and test:', 'raw': 'Then load the models and test:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'from transformers import (AutoModelForCausalLM, AutoTokenizer)\n\nmodel_id = ""codys12/bitnet-r1-qwen-32b"" \nmodel = AutoModelForCausalLM.from_pretrained(\n model_id,\n device_map=""cuda"",\n)\ntokenizer = AutoTokenizer.from_pretrained(model_id, padding_side=""left"")', 'raw': '```\nfrom transformers import (AutoModelForCausalLM, AutoTokenizer)\n\nmodel_id = ""codys12/bitnet-r1-qwen-32b"" \nmodel = AutoModelForCausalLM.from_pretrained(\n model_id,\n device_map=""cuda"",\n)\ntokenizer = AutoTokenizer.from_pretrained(model_id, padding_side=""left"")\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'bitnet-r1-llama-8b and bitnet-r1-qwen-32b were trained on ~ 300M and 200M tokens of the open-thoughts/OpenThoughts-114k dataset respectively, and were still significantly improving at the end of training. This preview simply demonstrates that the concept works, for future training runs we will leave the lm_head unquantized and align the last hidden state with the original model.', 'raw': 'bitnet-r1-llama-8b and bitnet-r1-qwen-32b were trained on ~ 300M and 200M tokens of the open-thoughts/OpenThoughts-114k dataset respectively, and were still significantly improving at the end of training. This preview simply demonstrates that the concept works, for future training runs we will leave the lm_head unquantized and align the last hidden state with the original model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Huge thanks to the team that made this possible:', 'raw': 'Huge thanks to the team that made this possible:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Gavin Childress, Aaron Herbst, Gavin Jones, Jasdeep Singh, Eli Vang, and Keagan Weinstock from the MSOE AI Club.', 'raw': 'Gavin Childress, Aaron Herbst, Gavin Jones, Jasdeep Singh, Eli Vang, and Keagan Weinstock from the MSOE AI Club.'}]","Introducing bitnet-r1-llama-8b and bitnet-r1-qwen-32b preview! These models are the first successful sub 1-billion-token finetune to BitNet architecture. We discovered that by adding an additional input RMSNorm to each linear, you can finetune directly to BitNet with fast convergence to original model performance! + +We are working on a pull request to use this extra RMS for any model. 
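+
+For intuition, here is a minimal PyTorch sketch of the extra-norm idea (illustrative only - the ternary rounding shown is the standard BitNet b1.58 recipe, and NormedBitLinear is just an illustrative name, not the class used in the fork):
+```
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+class RMSNorm(nn.Module):
+    def __init__(self, dim, eps=1e-6):
+        super().__init__()
+        self.weight = nn.Parameter(torch.ones(dim))
+        self.eps = eps
+
+    def forward(self, x):
+        # scale activations by their root-mean-square before the linear
+        return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps) * self.weight
+
+class NormedBitLinear(nn.Module):
+    # linear layer with the extra input RMSNorm; weights are ternarized
+    # on the fly with a straight-through estimator (BitNet b1.58 style)
+    def __init__(self, in_features, out_features):
+        super().__init__()
+        self.norm = RMSNorm(in_features)
+        self.linear = nn.Linear(in_features, out_features, bias=False)
+
+    def forward(self, x):
+        w = self.linear.weight
+        scale = w.abs().mean()  # per-tensor scale
+        w_q = (w / (scale + 1e-8)).round().clamp(-1, 1) * scale
+        w_q = w + (w_q - w).detach()  # straight-through gradient
+        return F.linear(self.norm(x), w_q)
+```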
+ +To test these models now, install this fork of transformers: +``` +pip install git+https://github.com/Codys12/transformers.git +``` +Then load the models and test: +``` +from transformers import (AutoModelForCausalLM, AutoTokenizer) + +model_id = ""codys12/bitnet-r1-qwen-32b"" +model = AutoModelForCausalLM.from_pretrained( + model_id, + device_map=""cuda"", +) +tokenizer = AutoTokenizer.from_pretrained(model_id, padding_side=""left"") +``` + +bitnet-r1-llama-8b and bitnet-r1-qwen-32b were trained on ~ 300M and 200M tokens of the open-thoughts/OpenThoughts-114k dataset respectively, and were still significantly improving at the end of training. This preview simply demonstrates that the concept works, for future training runs we will leave the lm_head unquantized and align the last hidden state with the original model. + +Huge thanks to the team that made this possible: +Gavin Childress, Aaron Herbst, Gavin Jones, Jasdeep Singh, Eli Vang, and Keagan Weinstock from the MSOE AI Club.",[],[],"[{'reaction': '👀', 'users': ['John6666', 'oceansweep', 'DuckyBlender', 'UsernamePartialName', 'harithzulfaizal', 'niko91i', 'RobAgrees', 'marcosstable'], 'count': 8}, {'reaction': '🚀', 'users': ['maldv', 'artpods56', 'RobAgrees', 'marcosstable'], 'count': 4}]",2025-05-12 23:44:08,2025-05-27 11:55:11.626,"[{'_id': '6346b9f83ea42ee2cb4b8407', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6346b9f83ea42ee2cb4b8407/Z1wQ4--pVWfp1qzV6PARw.png', 'fullname': 'Michael McCulloch', 'name': 'MichaelMcCulloch', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/codys12/705081891087680,1974,"{'language': 'en', 'probability': 0.8527505397796631}",1 +/avatars/452f52346ac46cf2d78560fd6873a0b3.svg,,Alexey Tyurin,atyurin,723886389493857,"[{'type': 'text', 'value': 'New Release: VagueIntent Datasets for Hospitality Intent Classification.', 'raw': 'New Release: VagueIntent Datasets for Hospitality Intent Classification.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Introducing the VagueIntent collection - a new resource for improving hospitality chatbots' ability to understand ambiguous guest requests:"", 'raw': ""Introducing the VagueIntent collection - a new resource for improving hospitality chatbots' ability to understand ambiguous guest requests:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'atyurin/vagueintent-hospitality-intent-classification-challenge-681ba6c4ac8f09f308d1b7f8'}, 'url': 'https://huggingface.co/collections/atyurin/vagueintent-hospitality-intent-classification-challenge-681ba6c4ac8f09f308d1b7f8', 'raw': 'https://huggingface.co/collections/atyurin/vagueintent-hospitality-intent-classification-challenge-681ba6c4ac8f09f308d1b7f8'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The collection features two specialized datasets:', 'raw': 'The collection features two specialized datasets:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1) ', 'raw': '1) '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'atyurin/VagueIntent-Train'}, 'url': 'https://huggingface.co/datasets/atyurin/VagueIntent-Train', 'raw': 'https://huggingface.co/datasets/atyurin/VagueIntent-Train'}, {'type': 'text', 'value': ': 400 samples of vague guest queries mapped to 40 
distinct hospitality intents', 'raw': ': 400 samples of vague guest queries mapped to 40 distinct hospitality intents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2) ', 'raw': '2) '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'atyurin/VagueIntent-Eval'}, 'url': 'https://huggingface.co/datasets/atyurin/VagueIntent-Eval', 'raw': 'https://huggingface.co/datasets/atyurin/VagueIntent-Eval'}, {'type': 'text', 'value': ': 100 samples for benchmarking model performance', 'raw': ': 100 samples for benchmarking model performance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""These datasets address a common challenge in hospitality AI: understanding what guests want when they don't express it directly. "", 'raw': ""These datasets address a common challenge in hospitality AI: understanding what guests want when they don't express it directly. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For example, ""I\'m concerned about my belongings after I officially conclude my stay"" actually means ""I want a late check-out.""', 'raw': 'For example, ""I\'m concerned about my belongings after I officially conclude my stay"" actually means ""I want a late check-out.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use these datasets to fine-tune language models and improve intent classification for hospitality applications. ', 'raw': 'Use these datasets to fine-tune language models and improve intent classification for hospitality applications. '}]","New Release: VagueIntent Datasets for Hospitality Intent Classification. + +Introducing the VagueIntent collection - a new resource for improving hospitality chatbots' ability to understand ambiguous guest requests: +https://huggingface.co/collections/atyurin/vagueintent-hospitality-intent-classification-challenge-681ba6c4ac8f09f308d1b7f8 + +The collection features two specialized datasets: +1) https://huggingface.co/datasets/atyurin/VagueIntent-Train: 400 samples of vague guest queries mapped to 40 distinct hospitality intents + +2) https://huggingface.co/datasets/atyurin/VagueIntent-Eval: 100 samples for benchmarking model performance + +These datasets address a common challenge in hospitality AI: understanding what guests want when they don't express it directly. + +For example, ""I'm concerned about my belongings after I officially conclude my stay"" actually means ""I want a late check-out."" + +Use these datasets to fine-tune language models and improve intent classification for hospitality applications. 
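+
+A quick way to start experimenting (the column and split names below are assumptions - check the dataset viewer for the actual schema):
+```
+from datasets import load_dataset
+
+train = load_dataset('atyurin/VagueIntent-Train', split='train')
+eval_set = load_dataset('atyurin/VagueIntent-Eval', split='train')
+
+print(train[0])  # inspect one row to confirm the real field names
+
+# build a label map over the 40 intents for a classification head
+labels = sorted({row['intent'] for row in train})  # 'intent' is hypothetical
+label2id = {name: i for i, name in enumerate(labels)}
+```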
",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-05-12 23:08:35,2025-05-12 23:08:35.144,[],/posts/atyurin/723886389493857,393,"{'language': 'en', 'probability': 0.8364871740341187}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,965868892852606,"[{'type': 'text', 'value': ""If you also tuned into Altman's second congress hearing (first in 2023) along with other AI executives, my takeaway is two words: New Deal (by FDR almost a century ago)."", 'raw': ""If you also tuned into Altman's second congress hearing (first in 2023) along with other AI executives, my takeaway is two words: New Deal (by FDR almost a century ago).""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The causal link is quite fascinating and worthy of a few blogposts or deep research queries, but I won't have more time for this (I really wish so), so here goes."", 'raw': ""The causal link is quite fascinating and worthy of a few blogposts or deep research queries, but I won't have more time for this (I really wish so), so here goes.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* AI workload loves GPUs because they allocate more transistors than CPUs for computing, and pack them by high-bandwidth memory', 'raw': '* AI workload loves GPUs because they allocate more transistors than CPUs for computing, and pack them by high-bandwidth memory'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* More computing in the small physical space -> more power draw and more heat dissipation', 'raw': '* More computing in the small physical space -> more power draw and more heat dissipation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* more heat dissipation -> liquid cooling', 'raw': '* more heat dissipation -> liquid cooling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* new cooling and heavier power draw -> bigger racks (heavier and taller)', 'raw': '* new cooling and heavier power draw -> bigger racks (heavier and taller)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* bigger racks -> (re)building data centers', 'raw': '* bigger racks -> (re)building data centers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* new data centers with higher power demand (peak and stability) -> grid update and nuclear power', 'raw': '* new data centers with higher power demand (peak and stability) -> grid update and nuclear power'}]","If you also tuned into Altman's second congress hearing (first in 2023) along with other AI executives, my takeaway is two words: New Deal (by FDR almost a century ago). + +The causal link is quite fascinating and worthy of a few blogposts or deep research queries, but I won't have more time for this (I really wish so), so here goes. 
+ +* AI workload loves GPUs because they allocate more transistors than CPUs for computing, and pack them by high-bandwidth memory +* More computing in the small physical space -> more power draw and more heat dissipation +* more heat dissipation -> liquid cooling +* new cooling and heavier power draw -> bigger racks (heavier and taller) +* bigger racks -> (re)building data centers +* new data centers with higher power demand (peak and stability) -> grid update and nuclear power",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-05-12 20:56:44,2025-05-12 23:25:41.322,[],/posts/onekq/965868892852606,461,"{'language': 'en', 'probability': 0.929025411605835}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,746841015963520,"[{'type': 'text', 'value': 'Matrix Game 🎮 an interactive foundation model for controllable game world generation, released by Skywork AI.', 'raw': 'Matrix Game 🎮 an interactive foundation model for controllable game world generation, released by Skywork AI.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Skywork/Matrix-Game'}, 'url': 'https://huggingface.co/Skywork/Matrix-Game', 'raw': 'https://huggingface.co/Skywork/Matrix-Game'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 17B with MIT licensed', 'raw': '✨ 17B with MIT licensed'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Diffusion-based image-to-world video generation via keyboard & mouse input', 'raw': '✨ Diffusion-based image-to-world video generation via keyboard & mouse input'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ GameWorld Score benchmark for Minecraft world models', 'raw': '✨ GameWorld Score benchmark for Minecraft world models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Massive Matrix Game Dataset with fine-grained action labels', 'raw': '✨ Massive Matrix Game Dataset with fine-grained action labels'}]","Matrix Game 🎮 an interactive foundation model for controllable game world generation, released by Skywork AI. 
+ +https://huggingface.co/Skywork/Matrix-Game + +✨ 17B with MIT licensed +✨ Diffusion-based image-to-world video generation via keyboard & mouse input +✨ GameWorld Score benchmark for Minecraft world models +✨ Massive Matrix Game Dataset with fine-grained action labels","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/LxjTOOpUEhBa9qaVD0cPf.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['ghostai1', 'John6666', 'darkzbaron', 'prithivMLmods', 'linoyts', 'niko91i'], 'count': 6}, {'reaction': '👀', 'users': ['nomadicsynth', 'OrlandoHugBot'], 'count': 2}]",2025-05-12 18:22:50,2025-05-12 18:22:50.476,[],/posts/AdinaY/746841015963520,2523,"{'language': 'en', 'probability': 0.7699910998344421}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,544378273517703,"[{'type': 'text', 'value': 'VLMS 2025 UPDATE 🔥', 'raw': 'VLMS 2025 UPDATE 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We just shipped a blog on everything latest on vision language models, including', 'raw': 'We just shipped a blog on everything latest on vision language models, including'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 GUI agents, agentic VLMs, omni models', 'raw': '🤖 GUI agents, agentic VLMs, omni models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📑 multimodal RAG', 'raw': '📑 multimodal RAG'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⏯️ video LMs', 'raw': '⏯️ video LMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤏🏻 smol models', 'raw': '🤏🏻 smol models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '..and more! ', 'raw': '..and more! '}, {'type': 'link', 'href': 'https://huggingface.co/blog/vlms-2025', 'raw': 'https://huggingface.co/blog/vlms-2025'}, {'type': 'new_line', 'raw': '\n'}]","VLMS 2025 UPDATE 🔥 + +We just shipped a blog on everything latest on vision language models, including +🤖 GUI agents, agentic VLMs, omni models +📑 multimodal RAG +⏯️ video LMs +🤏🏻 smol models +..and more! 
https://huggingface.co/blog/vlms-2025 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/zTV6bC38-ZfQEznKvn_Ok.png'}]",[],"[{'reaction': '🔥', 'users': ['merterbak', 'NikhilSandy', 'emaballarin', 'strahlenguy', 'atasoglu', 'g-ronimo', 'ghostai1', 'mrdbourke', 'John6666', 'ariG23498', 'linoyts', 'Sloff1155', 'Cosmobillian', 'prithivMLmods', 'fanatte', 'abdeljalilELmajjodi', 'talaviyabhavik', 'm8x7b'], 'count': 18}]",2025-05-12 16:21:59,2025-05-12 20:35:53.978,"[{'_id': '6421b1c68adc8881b974a89d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png', 'fullname': 'GHOSTAI', 'name': 'ghostai1', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 30, 'isFollowing': False}]",/posts/merve/544378273517703,5043,"{'language': 'en', 'probability': 0.7741794586181641}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,469494202006125,"[{'type': 'text', 'value': 'Officially kicking off my startup today🎉', 'raw': 'Officially kicking off my startup today🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Join me in building the future of learning: Lectūra - an advanced multi-agent software for adaptive personalized learning experience. Research will focus on building tools that empower individual learners to master needed self-taught skills with the help of AI. ', 'raw': 'Join me in building the future of learning: Lectūra - an advanced multi-agent software for adaptive personalized learning experience. Research will focus on building tools that empower individual learners to master needed self-taught skills with the help of AI. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read more: ', 'raw': 'Read more: '}, {'type': 'link', 'href': 'https://lecturalabs.com/', 'raw': 'https://lecturalabs.com/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Feel free to reach out via the mentioned email and follow the official account for updates: ', 'raw': 'Feel free to reach out via the mentioned email and follow the official account for updates: '}, {'type': 'link', 'href': 'https://x.com/lectura_ai', 'raw': 'https://x.com/lectura_ai'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Curiosity has a voice, let it teach you. Generate Lectures. Customize Instructors. Get Real-time Personalized Learning. ', 'raw': 'Curiosity has a voice, let it teach you. Generate Lectures. Customize Instructors. Get Real-time Personalized Learning. '}]","Officially kicking off my startup today🎉 +Join me in building the future of learning: Lectūra - an advanced multi-agent software for adaptive personalized learning experience. Research will focus on building tools that empower individual learners to master needed self-taught skills with the help of AI. +Read more: https://lecturalabs.com/ +Feel free to reach out via the mentioned email and follow the official account for updates: https://x.com/lectura_ai + +Curiosity has a voice, let it teach you. Generate Lectures. Customize Instructors. Get Real-time Personalized Learning. 
","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/5t017l-fkIWd8pNS9LcKS.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/LQaZJTHI8uzeQ3iDzu2yv.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/yjJtrnrEJ5ylfr3O7HLTm.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-05-12 16:11:01,2025-05-14 23:13:42.497,[],/posts/Jaward/469494202006125,429,"{'language': 'en', 'probability': 0.8844591975212097}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d3d84d457647eb547ed73b/0OkUtA79QpP0jhcet6ib4.png,14.0,Enderchef,Enderchef,366998594446946,"[{'type': 'text', 'value': 'From now on, my AI models, ICONN and Sabresooth, will work together to make weekly posts - stay tuned!', 'raw': 'From now on, my AI models, ICONN and Sabresooth, will work together to make weekly posts - stay tuned!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can find ICONN at:', 'raw': 'You can find ICONN at:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/Enderchef/ICONN-0.5-BETA-8B-Distilled-GGUF', 'raw': 'https://huggingface.co/Enderchef/ICONN-0.5-BETA-8B-Distilled-GGUF'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sabresooth is not publicly open yet, stay tuned for posting!', 'raw': 'Sabresooth is not publicly open yet, stay tuned for posting!'}]","From now on, my AI models, ICONN and Sabresooth, will work together to make weekly posts - stay tuned! + +You can find ICONN at: +https://huggingface.co/Enderchef/ICONN-0.5-BETA-8B-Distilled-GGUF + +Sabresooth is not publicly open yet, stay tuned for posting!",[],[],"[{'reaction': '👀', 'users': ['John6666', 'Enderchef', 'natalie5'], 'count': 3}]",2025-05-12 14:51:04,2025-05-12 14:51:04.075,[],/posts/Enderchef/366998594446946,1170,"{'language': 'en', 'probability': 0.8073511719703674}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,587357686300489,"[{'type': 'text', 'value': 'Hey! I built an AI Agent to query the FOIA API for a workshop at the Hacks/Hackers Summit in Baltimore and you can do it too! ', 'raw': 'Hey! I built an AI Agent to query the FOIA API for a workshop at the Hacks/Hackers Summit in Baltimore and you can do it too! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It’s a quick proof of concept to demo what agents can do, how to design workflows, and how to approach the coding side. TWant a fun project to learn how AI agents work? I built one that queries the FOIA API — and you can too!', 'raw': 'It’s a quick proof of concept to demo what agents can do, how to design workflows, and how to approach the coding side. TWant a fun project to learn how AI agents work? 
I built one that queries the FOIA API — and you can too!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's a quick proof of concept I did for a workshop at the Hacks/Hackers Summit in Baltimore, demonstrating what agents can do, how to design workflows, and approaches to coding them."", 'raw': ""It's a quick proof of concept I did for a workshop at the Hacks/Hackers Summit in Baltimore, demonstrating what agents can do, how to design workflows, and approaches to coding them.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Slides ', 'raw': '- Slides '}, {'type': 'link', 'href': 'https://docs.google.com/presentation/d/1lbf5K0yi213N7uxGnVKJdGWq2i0GayWj4vIcLkVlwD8/edit?usp=sharing', 'raw': 'https://docs.google.com/presentation/d/1lbf5K0yi213N7uxGnVKJdGWq2i0GayWj4vIcLkVlwD8/edit?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Colab notebook ', 'raw': '- Colab notebook '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1iw0qZyTni_6BcK0jj1x6gTfjm85NlaGv', 'raw': 'https://colab.research.google.com/drive/1iw0qZyTni_6BcK0jj1x6gTfjm85NlaGv'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Gradio app: ', 'raw': '- Gradio app: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/JournalistsonHF/foia-agent', 'raw': 'https://huggingface.co/spaces/JournalistsonHF/foia-agent'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MCP version to plug into Claude, Cursor, etc: ', 'raw': '- MCP version to plug into Claude, Cursor, etc: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/JournalistsonHF/foia-mcp-tools', 'raw': 'https://huggingface.co/spaces/JournalistsonHF/foia-mcp-tools'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Feel free to use the Gradio app for real FOIA requests, but also to improve it (I'm far from being a good coder) or adapt it for other countries."", 'raw': ""Feel free to use the Gradio app for real FOIA requests, but also to improve it (I'm far from being a good coder) or adapt it for other countries.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And shout-out to everyone who powered through the workshop! 😅', 'raw': 'And shout-out to everyone who powered through the workshop! 😅'}]","Hey! I built an AI Agent to query the FOIA API for a workshop at the Hacks/Hackers Summit in Baltimore and you can do it too! + +It's a quick proof of concept to demo what agents can do, how to design workflows, and how to approach the coding side. Want a fun project to learn how AI agents work? I built one that queries the FOIA API — and you can too! + +It's a quick proof of concept I did for a workshop at the Hacks/Hackers Summit in Baltimore, demonstrating what agents can do, how to design workflows, and approaches to coding them. 
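+
+For a sense of how small the core tool can be, here's a stripped-down sketch (the endpoint and parameter names are placeholders rather than the real FOIA.gov API - see the Space source for the actual query logic):
+```
+import requests
+
+def search_foia(query: str, limit: int = 5):
+    # hypothetical call shape; check FOIA.gov's API docs for real paths/params
+    resp = requests.get(
+        'https://api.foia.gov/api/agency_components',
+        params={'query': query, 'limit': limit},
+        timeout=30,
+    )
+    resp.raise_for_status()
+    return resp.json()
+```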
+ +- Slides https://docs.google.com/presentation/d/1lbf5K0yi213N7uxGnVKJdGWq2i0GayWj4vIcLkVlwD8/edit?usp=sharing +- Colab notebook https://colab.research.google.com/drive/1iw0qZyTni_6BcK0jj1x6gTfjm85NlaGv +- Gradio app: https://huggingface.co/spaces/JournalistsonHF/foia-agent +- MCP version to plug into Claude, Cursor, etc: https://huggingface.co/spaces/JournalistsonHF/foia-mcp-tools + +Feel free to use the Gradio app for real FOIA requests, but also to improve it (I'm far from being a good coder) or adapt it for other countries. + +And shout-out to everyone who powered through the workshop! 😅","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/sGueFPCmOfgRTcWdS-3QL.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'Ameeeee', 'SaylorTwift', 'YiSHuA'], 'count': 4}]",2025-05-12 12:38:17,2025-05-13 02:52:36.159,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/fdaudens/587357686300489,802,"{'language': 'en', 'probability': 0.8797121644020081}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/0Ye_WpYpFyNw2qRGJ5L93.png,23.0,VirtuOasis,VirtualOasis,965866013655862,"[{'type': 'text', 'value': 'Automatic Multi-Modal Research Agent', 'raw': 'Automatic Multi-Modal Research Agent'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I am thinking of building an Automatic Research Agent that can boost creativity! ', 'raw': 'I am thinking of building an Automatic Research Agent that can boost creativity! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Input: Topics or data sources', 'raw': 'Input: Topics or data sources'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Processing: Automated deep research', 'raw': 'Processing: Automated deep research'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Output: multimodal results (such as reports, videos, audio, diagrams) & multi-platform publishing.', 'raw': 'Output: multimodal results (such as reports, videos, audio, diagrams) & multi-platform publishing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'There is a three-stage process', 'raw': 'There is a three-stage process'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the initial Stage, output for text-based content in markdown format allows for user review before transformation into various other formats, such as PDF or HTML.', 'raw': 'In the initial Stage, output for text-based content in markdown format allows for user review before transformation into various other formats, such as PDF or HTML.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The second stage transforms the output into other modalities, like audio, video, diagrams, and translations into different languages.', 'raw': 'The second stage transforms the output into other modalities, like audio, video, diagrams, and translations into different languages.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The final stage focuses on publishing multi-modal content across multiple platforms like X, GitHub, Hugging Face, YouTube, and podcasts, etc.', 'raw': 'The final stage focuses on publishing multi-modal content across multiple platforms like X, GitHub, Hugging Face, YouTube, and podcasts, etc.'}]","Automatic Multi-Modal Research Agent +I am thinking of building an Automatic Research Agent that can boost creativity! + +Input: Topics or data sources +Processing: Automated deep research +Output: multimodal results (such as reports, videos, audio, diagrams) & multi-platform publishing. + +There is a three-stage process +In the initial Stage, output for text-based content in markdown format allows for user review before transformation into various other formats, such as PDF or HTML. + +The second stage transforms the output into other modalities, like audio, video, diagrams, and translations into different languages. 
+ +The final stage focuses on publishing multi-modal content across multiple platforms like X, GitHub, Hugging Face, YouTube, and podcasts, etc.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a7901f3bb0e70b41c48805/thsierebOybiKV3Lp2ebj.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a7901f3bb0e70b41c48805/WlTvpp7b5uXKP8YC4ksLP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a7901f3bb0e70b41c48805/RX8NbBn_jE3uSXRcVNdb9.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a7901f3bb0e70b41c48805/9iWzCdAIlEMS5GNhk6J90.png'}]",[],"[{'reaction': '🔥', 'users': ['abhi170198', 'djoums', 'VirtualOasis', 'Muavviz', 'John6666', 'plmsmile'], 'count': 6}, {'reaction': '😎', 'users': ['VirtualOasis'], 'count': 1}, {'reaction': '➕', 'users': ['abhi170198'], 'count': 1}, {'reaction': '🤗', 'users': ['abhi170198'], 'count': 1}, {'reaction': '🚀', 'users': ['abhi170198'], 'count': 1}, {'reaction': '❤️', 'users': ['abhi170198'], 'count': 1}, {'reaction': '🧠', 'users': ['abhi170198'], 'count': 1}, {'reaction': '👍', 'users': ['abhi170198'], 'count': 1}, {'reaction': '👀', 'users': ['abhi170198'], 'count': 1}]",2025-05-12 12:28:16,2025-05-12 12:28:16.628,[],/posts/VirtualOasis/965866013655862,2619,"{'language': 'en', 'probability': 0.8400352001190186}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,254248082050134,"[{'type': 'text', 'value': 'NEW RELEASE: Esper 3 for Qwen 3!', 'raw': 'NEW RELEASE: Esper 3 for Qwen 3!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- A full-stack software assistant: a reasoning finetune focused on coding, architecture, and DevOps using the Titanium and Tachibana datasets!', 'raw': '- A full-stack software assistant: a reasoning finetune focused on coding, architecture, and DevOps using the Titanium and Tachibana datasets!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Improved general and creative reasoning skills, powered by the Raiden dataset.', 'raw': '- Improved general and creative reasoning skills, powered by the Raiden dataset.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4B model: ', 'raw': '4B model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ValiantLabs/Qwen3-4B-Esper3'}, 'url': 'https://huggingface.co/ValiantLabs/Qwen3-4B-Esper3', 'raw': 'https://huggingface.co/ValiantLabs/Qwen3-4B-Esper3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '8B model: ', 'raw': '8B model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ValiantLabs/Qwen3-8B-Esper3'}, 'url': 'https://huggingface.co/ValiantLabs/Qwen3-8B-Esper3', 'raw': 'https://huggingface.co/ValiantLabs/Qwen3-8B-Esper3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We'll also be bringing Esper 3 to larger Qwen 3 models as soon as we can - if you want these, consider helping us out: "", 'raw': ""We'll also be bringing Esper 3 to larger Qwen 3 models as soon as we can - if you want these, consider helping us out: ""}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'sequelbox/SupportOpenSource'}, 'url': 'https://huggingface.co/spaces/sequelbox/SupportOpenSource', 'raw': 
'https://huggingface.co/spaces/sequelbox/SupportOpenSource'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More models and datasets to come soon!', 'raw': 'More models and datasets to come soon!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'with my love and enthusiasm,', 'raw': 'with my love and enthusiasm,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'allegra', 'raw': 'allegra'}]","NEW RELEASE: Esper 3 for Qwen 3! + +- A full-stack software assistant: a reasoning finetune focused on coding, architecture, and DevOps using the Titanium and Tachibana datasets! +- Improved general and creative reasoning skills, powered by the Raiden dataset. + +4B model: https://huggingface.co/ValiantLabs/Qwen3-4B-Esper3 +8B model: https://huggingface.co/ValiantLabs/Qwen3-8B-Esper3 + +We'll also be bringing Esper 3 to larger Qwen 3 models as soon as we can - if you want these, consider helping us out: https://huggingface.co/spaces/sequelbox/SupportOpenSource + +More models and datasets to come soon! + +with my love and enthusiasm, +allegra",[],[],"[{'reaction': '👀', 'users': ['John6666', 'bolenath', 'djuna', 'Fishtiks', 'p789'], 'count': 5}, {'reaction': '🚀', 'users': ['zoeywin'], 'count': 1}]",2025-05-07 02:01:40,2025-05-12 16:30:35.830,[],/posts/sequelbox/254248082050134,2692,"{'language': 'en', 'probability': 0.8497692346572876}",1 +/avatars/40e3f3bd6a4fa7831c0735e6cd418316.svg,,João Dutra,Dutra0530,167592811581856,"[{'type': 'text', 'value': 'Guys, every time I try to change something, or save my project, this strange error appears. Does anyone know how to solve it?', 'raw': 'Guys, every time I try to change something, or save my project, this strange error appears. Does anyone know how to solve it?'}]","Guys, every time I try to change something, or save my project, this strange error appears. 
Does anyone know how to solve it?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6810de715288937934c8ef0b/6YubFbeTRQ6pQOalTY9rb.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'YoussefSharawy91', 'maxfad123'], 'count': 3}]",2025-05-06 23:57:51,2025-05-08 11:47:59.515,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '64f08424aeffc7ca5d4f1e81', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64f08424aeffc7ca5d4f1e81/qJUVtCvuN9rXacrc-M4bi.jpeg', 'fullname': 'Youssef Elshaarawy', 'name': 'YoussefSharawy91', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6810de715288937934c8ef0b', 'avatarUrl': '/avatars/40e3f3bd6a4fa7831c0735e6cd418316.svg', 'fullname': 'João Dutra', 'name': 'Dutra0530', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '67b9e7224293ac00c37178ab', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/uTOgGQgo6qJWhBaLQcdoM.png', 'fullname': 'Howell Munene', 'name': 'Howie254', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/Dutra0530/167592811581856,2888,"{'language': 'en', 'probability': 0.9633224606513977}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,191562047619282,"[{'type': 'text', 'value': ""What are you using to evaluate models or AI systems? So far we're building lighteval & leaderboards on the hub but still feels early & a lot more to build. What would be useful to you?"", 'raw': ""What are you using to evaluate models or AI systems? So far we're building lighteval & leaderboards on the hub but still feels early & a lot more to build. What would be useful to you?""}]",What are you using to evaluate models or AI systems? So far we're building lighteval & leaderboards on the hub but still feels early & a lot more to build. 
What would be useful to you?,[],[],"[{'reaction': '❤️', 'users': ['takarajordan', 'onekq', 'John6666', 'YoussefSharawy91', 'Dcas89', 'tomlee3ddesign'], 'count': 6}]",2025-05-06 18:24:02,2025-05-07 23:09:11.147,"[{'_id': '6613f7ae43c4456e13ecbdcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg', 'fullname': 'Jordan Legg', 'name': 'takarajordan', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 40, 'isFollowing': False}, {'_id': '669dbd709a4bf63e08f1ddc2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png', 'fullname': 'Yi Cui', 'name': 'onekq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}, {'_id': '65a488b5224f96d8cc3754fc', 'avatarUrl': '/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg', 'fullname': 'Emin Temiz', 'name': 'etemiz', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 46, 'isFollowing': False}, {'_id': '643ac5d2e2b979ae6144d68c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png', 'fullname': 'nyuuzyou', 'name': 'nyuuzyou', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 244, 'isFollowing': False}, {'_id': '64f08424aeffc7ca5d4f1e81', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64f08424aeffc7ca5d4f1e81/qJUVtCvuN9rXacrc-M4bi.jpeg', 'fullname': 'Youssef Elshaarawy', 'name': 'YoussefSharawy91', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/clem/191562047619282,4077,"{'language': 'en', 'probability': 0.9639672040939331}",6 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,892703768040278,"[{'type': 'text', 'value': 'ACE-Step 🎵 a music generation foundation model released by ', 'raw': 'ACE-Step 🎵 a music generation foundation model released by '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'StepFun & ACEStudio', 'raw': 'StepFun & ACEStudio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ACE-Step/ACE-Step-v1-3.5B'}, 'url': 'https://huggingface.co/ACE-Step/ACE-Step-v1-3.5B', 'raw': 'https://huggingface.co/ACE-Step/ACE-Step-v1-3.5B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ACE-Step/ACE-Step'}, 'url': 'https://huggingface.co/spaces/ACE-Step/ACE-Step', 'raw': 'https://huggingface.co/spaces/ACE-Step/ACE-Step'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 3.5B, Apache2.0 licensed', 'raw': '✨ 3.5B, Apache2.0 licensed'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 115× faster than LLMs (4-min music in 20s on A100)', 'raw': '✨ 115× faster than LLMs (4-min music in 20s on A100)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Diffusion + DCAE + linear transformer = speed + coherence', 'raw': '✨ Diffusion + DCAE + linear transformer 
= speed + coherence'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Supports voice cloning, remixing, lyric editing & more', 'raw': '✨ Supports voice cloning, remixing, lyric editing & more'}]","ACE-Step 🎵 a music generation foundation model released by +StepFun & ACEStudio + +Model: https://huggingface.co/ACE-Step/ACE-Step-v1-3.5B +Demo: https://huggingface.co/spaces/ACE-Step/ACE-Step + +✨ 3.5B, Apache2.0 licensed +✨ 115× faster than LLMs (4-min music in 20s on A100) +✨ Diffusion + DCAE + linear transformer = speed + coherence +✨ Supports voice cloning, remixing, lyric editing & more","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/E6C-826w6lLFwcwQFGRAC.jpeg'}]",[],"[{'reaction': '😎', 'users': ['Fishtiks', 'John6666', 'YaTharThShaRma999', 'linoyts', 'JohnRoger', 'eramax', '9voltfan2009', 'nordegraph'], 'count': 8}]",2025-05-06 16:46:05,2025-05-07 19:05:30.766,"[{'_id': '6421b1c68adc8881b974a89d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6421b1c68adc8881b974a89d/faE0x7dQ8r0CwXAeOpL0N.png', 'fullname': 'GHOSTAI', 'name': 'ghostai1', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 30, 'isFollowing': False}]",/posts/AdinaY/892703768040278,3941,"{'language': 'en', 'probability': 0.7937390804290771}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,363670261444169,"[{'type': 'text', 'value': 'CCI4.0-M2 📊 A powerful dataset with 3 specialized subsets, released by ', 'raw': 'CCI4.0-M2 📊 A powerful dataset with 3 specialized subsets, released by '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'BAAIBeijing', 'raw': 'BAAIBeijing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'BAAI/cci40-68199d90bbc798680df16d7c'}, 'url': 'https://huggingface.co/collections/BAAI/cci40-68199d90bbc798680df16d7c', 'raw': 'https://huggingface.co/collections/BAAI/cci40-68199d90bbc798680df16d7c'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ M2-Base: 3.5TB web data (EN/ZH), with LLM-augmented content, APACHE2.0', 'raw': '✨ M2-Base: 3.5TB web data (EN/ZH), with LLM-augmented content, APACHE2.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ M2-CoT: 4.2TB of auto-synthesized CoT reasoning data', 'raw': '✨ M2-CoT: 4.2TB of auto-synthesized CoT reasoning data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ M2-Extra: domain-specific knowledge', 'raw': '✨ M2-Extra: domain-specific knowledge'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","CCI4.0-M2 📊 A powerful dataset with 3 specialized subsets, released by +BAAIBeijing + +https://huggingface.co/collections/BAAI/cci40-68199d90bbc798680df16d7c + +✨ M2-Base: 3.5TB web data (EN/ZH), with LLM-augmented content, APACHE2.0 +✨ M2-CoT: 4.2TB of auto-synthesized CoT reasoning data +✨ M2-Extra: domain-specific knowledge + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/3Iz4rf58u7GZih5fDVw8y.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-05-06 16:38:34,2025-05-06 16:43:11.486,[],/posts/AdinaY/363670261444169,848,"{'language': 'en', 'probability': 
0.7057577967643738}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,742287367367358,"[{'type': 'text', 'value': ""A ton of impactful models and datasets in open AI past week, let's summarize the best 🤩 "", 'raw': ""A ton of impactful models and datasets in open AI past week, let's summarize the best 🤩 ""}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/releases-apr-21-and-may-2-6819dcc84da4190620f448a3'}, 'url': 'https://huggingface.co/collections/merve/releases-apr-21-and-may-2-6819dcc84da4190620f448a3', 'raw': 'https://huggingface.co/collections/merve/releases-apr-21-and-may-2-6819dcc84da4190620f448a3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💬 Qwen made it rain! They released Qwen3: new dense and MoE models ranging from 0.6B to 235B 🤯 as well as Qwen2.5-Omni, any-to-any model in 3B and 7B!', 'raw': '💬 Qwen made it rain! They released Qwen3: new dense and MoE models ranging from 0.6B to 235B 🤯 as well as Qwen2.5-Omni, any-to-any model in 3B and 7B!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Microsoft AI released Phi4 reasoning models (that also come in mini and plus sizes)', 'raw': '> Microsoft AI released Phi4 reasoning models (that also come in mini and plus sizes)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> NVIDIA released new CoT reasoning datasets', 'raw': '> NVIDIA released new CoT reasoning datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖼️ > ByteDance released UI-TARS-1.5, native multimodal UI parsing agentic model', 'raw': '🖼️ > ByteDance released UI-TARS-1.5, native multimodal UI parsing agentic model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Meta released EdgeTAM, an on-device object tracking model (SAM2 variant)', 'raw': '> Meta released EdgeTAM, an on-device object tracking model (SAM2 variant)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗣️ NVIDIA released parakeet-tdt-0.6b-v2, a smol 600M automatic speech recognition model', 'raw': '🗣️ NVIDIA released parakeet-tdt-0.6b-v2, a smol 600M automatic speech recognition model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Nari released Dia, a 1.6B text-to-speech model', 'raw': '> Nari released Dia, a 1.6B text-to-speech model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Moonshot AI released Kimi Audio, a new audio understanding, generation, conversation model', 'raw': '> Moonshot AI released Kimi Audio, a new audio understanding, generation, conversation model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👩🏻\u200d💻 JetBrains released Melium models in base and SFT for coding', 'raw': '👩🏻\u200d💻 JetBrains released Melium models in base and SFT for coding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Tesslate released UIGEN-T2-7B, a new text-to-frontend-code model 🤩', 'raw': '> Tesslate released UIGEN-T2-7B, a new text-to-frontend-code model 🤩'}]","A ton of impactful models and datasets in open AI past week, let's summarize the best 🤩 https://huggingface.co/collections/merve/releases-apr-21-and-may-2-6819dcc84da4190620f448a3 + +💬 Qwen made it rain! They released Qwen3: new dense and MoE models ranging from 0.6B to 235B 🤯 as well as Qwen2.5-Omni, any-to-any model in 3B and 7B! 
+> Microsoft AI released Phi4 reasoning models (that also come in mini and plus sizes) +> NVIDIA released new CoT reasoning datasets +🖼️ > ByteDance released UI-TARS-1.5, native multimodal UI parsing agentic model +> Meta released EdgeTAM, an on-device object tracking model (SAM2 variant) +🗣️ NVIDIA released parakeet-tdt-0.6b-v2, a smol 600M automatic speech recognition model +> Nari released Dia, a 1.6B text-to-speech model +> Moonshot AI released Kimi Audio, a new audio understanding, generation, conversation model +👩🏻‍💻 JetBrains released Melium models in base and SFT for coding +> Tesslate released UIGEN-T2-7B, a new text-to-frontend-code model 🤩","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/3RHB1fOHt-CcT4fM6XvY4.png'}]",[],"[{'reaction': '🔥', 'users': ['roldanjorge', 'BasitMustafa', 'lserinol', 'ghostai1', 'John6666', 'AdinaY', 'Dcas89', 'E7Reine', 'Dendrobatidae', 'jsulz', 'tensiondriven'], 'count': 11}]",2025-05-06 16:06:03,2025-05-06 16:06:03.280,[],/posts/merve/742287367367358,5076,"{'language': 'en', 'probability': 0.855323851108551}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/653772fed7616b72795db894/XGl4VrCdVL1GmgOnSwmIZ.jpeg,33.0,Bertrand Charpentier,sharpenb,996977777058725,"[{'type': 'text', 'value': 'How to learn about efficient AI? - Happy to announce the Awesome AI Efficiency repo that gathers a curated list of 100+ materials to understand the challenges and solutions in making AI faster, smaller, cheaper, greener. ', 'raw': 'How to learn about efficient AI? - Happy to announce the Awesome AI Efficiency repo that gathers a curated list of 100+ materials to understand the challenges and solutions in making AI faster, smaller, cheaper, greener. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 It is designed for a **large audience** including beginners, decision-makers, engineers, and researchers.', 'raw': '🚀 It is designed for a **large audience** including beginners, decision-makers, engineers, and researchers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 It contains **diverse materials** with newspaper articles, blogs, tools, tech reports, research papers, books, and lectures.', 'raw': '📚 It contains **diverse materials** with newspaper articles, blogs, tools, tech reports, research papers, books, and lectures.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is an ongoing project. Do not hesitate to share your feedback/suggestions and star the repo! 🌟', 'raw': 'This is an ongoing project. Do not hesitate to share your feedback/suggestions and star the repo! 🌟'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/PrunaAI/awesome-ai-efficiency', 'raw': 'https://github.com/PrunaAI/awesome-ai-efficiency'}]","How to learn about efficient AI? - Happy to announce the Awesome AI Efficiency repo that gathers a curated list of 100+ materials to understand the challenges and solutions in making AI faster, smaller, cheaper, greener. + +🚀 It is designed for a **large audience** including beginners, decision-makers, engineers, and researchers. +📚 It contains **diverse materials** with newspaper articles, blogs, tools, tech reports, research papers, books, and lectures. + +This is an ongoing project. Do not hesitate to share your feedback/suggestions and star the repo! 
🌟 + +https://github.com/PrunaAI/awesome-ai-efficiency",[],[],"[{'reaction': '🧠', 'users': ['solongeran', 'alibasit', 'Dendrobatidae', 'loulou2'], 'count': 4}, {'reaction': '🚀', 'users': ['daniel-ltw', 'John6666', 'sharpenb'], 'count': 3}, {'reaction': '👍', 'users': ['daniel-ltw'], 'count': 1}]",2025-05-06 13:53:30,2025-05-08 11:20:59.823,"[{'_id': '681a1790970ac9eabc07dd8d', 'avatarUrl': '/avatars/9a6a3a31df0789eaed03158d185c5257.svg', 'fullname': 'abu maryam rahmat', 'name': 'abumaryamrahmat', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '677c20c21b7fb114c5a91b99', 'avatarUrl': '/avatars/1e6d37bbc1685aa88e584168c676e5fc.svg', 'fullname': 'Oğuzhan', 'name': 'Dendrobatidae', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/sharpenb/996977777058725,3166,"{'language': 'en', 'probability': 0.9074061512947083}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/XU7EZuyrAoMabGGueQOcH.jpeg,61.0,Zhang Zechuan,RiverZ,221754259422855,"[{'type': 'text', 'value': ""🔥 We're thrilled to share some exciting news about ICEdit! Currently, ICEdit app ("", 'raw': ""🔥 We're thrilled to share some exciting news about ICEdit! Currently, ICEdit app (""}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'RiverZ/ICEdit'}, 'url': 'https://huggingface.co/spaces/RiverZ/ICEdit', 'raw': 'https://huggingface.co/spaces/RiverZ/ICEdit'}, {'type': 'text', 'value': "") has soared to the second place on the weekly trend list of Hugging Face Space, just trailing behind Qwen3. What's more, it also holds the second position on the overall space trend list. This achievement wouldn't have been possible without your incredible support and love. A huge thank you to each and every one of you❤!"", 'raw': "") has soared to the second place on the weekly trend list of Hugging Face Space, just trailing behind Qwen3. What's more, it also holds the second position on the overall space trend list. This achievement wouldn't have been possible without your incredible support and love. A huge thank you to each and every one of you❤!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🎉 The ICEdit community has been incredibly active, and we've seen a plethora of amazing ComfyUI workflows being shared. For instance, with the help of ComfyUI - nunchaku, you can run ICEdit locally with just 4GB of VRAM. This makes it much more accessible for those with limited hardware resources."", 'raw': ""🎉 The ICEdit community has been incredibly active, and we've seen a plethora of amazing ComfyUI workflows being shared. For instance, with the help of ComfyUI - nunchaku, you can run ICEdit locally with just 4GB of VRAM. This makes it much more accessible for those with limited hardware resources.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🎇 If you're interested in the detailed information, please head over to our repository. We highly encourage you to give these workflows a try and explore the creative possibilities that ICEdit offers."", 'raw': ""🎇 If you're interested in the detailed information, please head over to our repository. 
We highly encourage you to give these workflows a try and explore the creative possibilities that ICEdit offers.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Github Repo: ', 'raw': 'Github Repo: '}, {'type': 'link', 'href': 'https://github.com/River-Zhang/ICEdit', 'raw': 'https://github.com/River-Zhang/ICEdit'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hugging Face Space: ', 'raw': 'Hugging Face Space: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'RiverZ/ICEdit'}, 'url': 'https://huggingface.co/spaces/RiverZ/ICEdit', 'raw': 'https://huggingface.co/spaces/RiverZ/ICEdit'}]","🔥 We're thrilled to share some exciting news about ICEdit! Currently, ICEdit app (https://huggingface.co/spaces/RiverZ/ICEdit) has soared to the second place on the weekly trend list of Hugging Face Space, just trailing behind Qwen3. What's more, it also holds the second position on the overall space trend list. This achievement wouldn't have been possible without your incredible support and love. A huge thank you to each and every one of you❤! + +🎉 The ICEdit community has been incredibly active, and we've seen a plethora of amazing ComfyUI workflows being shared. For instance, with the help of ComfyUI - nunchaku, you can run ICEdit locally with just 4GB of VRAM. This makes it much more accessible for those with limited hardware resources. + +🎇 If you're interested in the detailed information, please head over to our repository. We highly encourage you to give these workflows a try and explore the creative possibilities that ICEdit offers. + +Github Repo: https://github.com/River-Zhang/ICEdit +Hugging Face Space: https://huggingface.co/spaces/RiverZ/ICEdit","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6513e6d72f79557a90d79dab/yaCH0IAlNooYXDzPasHCR.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6513e6d72f79557a90d79dab/R5-j3dY0-aTi7nMD7GiNm.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6513e6d72f79557a90d79dab/BEfv5Bnu0zx6Ma8h0eCis.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6513e6d72f79557a90d79dab/iWzZWCc9wgZpToL8kem05.png'}]",[],"[{'reaction': '🤗', 'users': ['RiverZ', 'linoyts', 'ghostai1', 'John6666', 'mertbozkurt', 'eaddario', 'TheDrunkenSnail', 'multimodalart', 'mmx31', 'filipstrand'], 'count': 10}, {'reaction': '❤️', 'users': ['Chief-Inspector', 'UnderController', 'abdeljalilELmajjodi', 'multimodalart', 'ianyeung', 'RiverZ'], 'count': 6}]",2025-05-06 12:33:04,2025-05-06 12:33:04.717,[],/posts/RiverZ/221754259422855,6753,"{'language': 'en', 'probability': 0.9357439279556274}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/0Ye_WpYpFyNw2qRGJ5L93.png,23.0,VirtuOasis,VirtualOasis,885212606719735,"[{'type': 'text', 'value': 'Agents vs. Workflows', 'raw': 'Agents vs. Workflows'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Agents are systems where LLMs dynamically direct their processes and tool usage, maintaining control over how they accomplish tasks. ', 'raw': 'Agents are systems where LLMs dynamically direct their processes and tool usage, maintaining control over how they accomplish tasks. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Workflows run through predefined code paths, ensuring that each step is executed in a deterministic manner.', 'raw': 'Workflows run through predefined code paths, ensuring that each step is executed in a deterministic manner.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Agents are like smart assistants that can think on their own. They understand situations, make decisions, and act, whether the task is new or unpredictable. Think of the Agent as a chef who can make a meal based on what they have.', 'raw': 'Agents are like smart assistants that can think on their own. They understand situations, make decisions, and act, whether the task is new or unpredictable. Think of the Agent as a chef who can make a meal based on what they have.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Workflows are like a recipe with fixed steps. They’re a series of tasks done in order, like following a checklist for approving a loan. They’re great for tasks that don’t change much.', 'raw': 'Workflows are like a recipe with fixed steps. They’re a series of tasks done in order, like following a checklist for approving a loan. They’re great for tasks that don’t change much.'}]","Agents vs. Workflows +Agents are systems where LLMs dynamically direct their processes and tool usage, maintaining control over how they accomplish tasks. +Workflows run through predefined code paths, ensuring that each step is executed in a deterministic manner. + +Agents are like smart assistants that can think on their own. They understand situations, make decisions, and act, whether the task is new or unpredictable. Think of the Agent as a chef who can make a meal based on what they have. + +Workflows are like a recipe with fixed steps. They’re a series of tasks done in order, like following a checklist for approving a loan. 
They’re great for tasks that don’t change much.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a7901f3bb0e70b41c48805/iYdFJfOSu2vf5gRckI4GD.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a7901f3bb0e70b41c48805/TWDwQLR5wPLABBM6GmYEG.png'}]",[],"[{'reaction': '👍', 'users': ['CarlAinsworth456', 'med4u', 'John6666', 'VirtualOasis', 'vrugutuhu', 'mgor', 'reira24', 'BennyDaBall', 'zyziszy', 'gladiatorsociety'], 'count': 10}, {'reaction': '🧠', 'users': ['solongeran', 'VirtualOasis', 'reira24'], 'count': 3}, {'reaction': '🚀', 'users': ['VirtualOasis', 'reira24'], 'count': 2}, {'reaction': '🤗', 'users': ['VirtualOasis', 'reira24'], 'count': 2}]",2025-05-06 10:42:47,2025-05-09 03:20:40.430,[],/posts/VirtualOasis/885212606719735,3105,"{'language': 'en', 'probability': 0.9600669741630554}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/658a4c914bb41498f7d5e3ca/zMJjxfazi9ePc7GZ1jRAE.jpeg,66.0,Pro Creations,ProCreations,520081083582146,"[{'type': 'text', 'value': '🚨 NEW DATASET ALERT 🚨', 'raw': '🚨 NEW DATASET ALERT 🚨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Come check out', 'raw': 'Come check out'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'ProCreations/black-hole-sim-randomized'}, 'url': 'https://huggingface.co/datasets/ProCreations/black-hole-sim-randomized', 'raw': 'https://huggingface.co/datasets/ProCreations/black-hole-sim-randomized'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'a high-fidelity dataset with 400,000+ randomized black hole simulations — packed with relativistic metrics, Kerr geometry, and GR weirdness to help AIs actually understand physics.', 'raw': 'a high-fidelity dataset with 400,000+ randomized black hole simulations — packed with relativistic metrics, Kerr geometry, and GR weirdness to help AIs actually understand physics.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🕳️ Teach your model:', 'raw': '🕳️ Teach your model:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tTime dilation', 'raw': '\t•\tTime dilation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tRedshift', 'raw': '\t•\tRedshift'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tOrbital dynamics', 'raw': '\t•\tOrbital dynamics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tFrame dragging', 'raw': '\t•\tFrame dragging'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tFull Kerr tensors', 'raw': '\t•\tFull Kerr tensors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '…and more, all in raw JSONL!', 'raw': '…and more, all in raw JSONL!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This release celebrates SimpleMath hitting 200 downloads — thank you all so much for the support! 🙌', 'raw': 'This release celebrates SimpleMath hitting 200 downloads — thank you all so much for the support! 🙌'}]","🚨 NEW DATASET ALERT 🚨 + +Come check out +https://huggingface.co/datasets/ProCreations/black-hole-sim-randomized +a high-fidelity dataset with 400,000+ randomized black hole simulations — packed with relativistic metrics, Kerr geometry, and GR weirdness to help AIs actually understand physics. 
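To make the agent/workflow contrast above concrete, here is a minimal Python sketch of the two patterns. Everything in it is a hypothetical stand-in (the `call_llm` callback, the loan-check tools, the argument shapes), not a real library API.

```py
# Hypothetical sketch of the two patterns; no real API is assumed.

def check_credit(app):  # fixed step used by both patterns
    return app["score"] > 650

def verify_income(app):
    return app["income"] > 30_000

def loan_workflow(app):
    """Workflow: a predefined, deterministic code path (the 'recipe')."""
    if not check_credit(app):
        return "rejected"
    if not verify_income(app):
        return "rejected"
    return "approved"

TOOLS = {"check_credit": check_credit, "verify_income": verify_income}

def agent(task, call_llm, max_steps=5):
    """Agent: the LLM decides at each step which tool to call next (the 'chef')."""
    history = [task]
    for _ in range(max_steps):
        action = call_llm(history, list(TOOLS))  # the model picks the next action
        if action["name"] == "finish":
            return action["answer"]
        result = TOOLS[action["name"]](action["args"])
        history.append((action, result))  # the model sees the result next turn
    return "gave up"
```

The workflow's control flow is fixed in code; the agent's control flow is chosen by the model at runtime, which is exactly the trade-off the post describes.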
+ +🕳️ Teach your model: + • Time dilation + • Redshift + • Orbital dynamics + • Frame dragging + • Full Kerr tensors +…and more, all in raw JSONL! + +This release celebrates SimpleMath hitting 200 downloads — thank you all so much for the support! 🙌",[],[],"[{'reaction': '🧠', 'users': ['John6666'], 'count': 1}]",2025-05-06 07:42:19,2025-05-06 07:42:19.514,[],/posts/ProCreations/520081083582146,713,"{'language': 'en', 'probability': 0.7146899104118347}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6567aec66c1ae87d4ec42272/2OfjRgfNeURrYH6noQkeZ.jpeg,103.0,Mert Erbak,merterbak,465113700174187,"[{'type': 'text', 'value': ""Microsoft released their new fine-tuned Phi-4 models with reasoning data yesterday. They outperform/rival much larger models. Check them out if you haven't yet. 🚀"", 'raw': ""Microsoft released their new fine-tuned Phi-4 models with reasoning data yesterday. They outperform/rival much larger models. Check them out if you haven't yet. 🚀""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Phi4 mini reasoning(SFT): ', 'raw': 'Phi4 mini reasoning(SFT): '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Phi-4-mini-reasoning'}, 'url': 'https://huggingface.co/microsoft/Phi-4-mini-reasoning', 'raw': 'https://huggingface.co/microsoft/Phi-4-mini-reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Phi-4 reasoning(SFT): ', 'raw': 'Phi-4 reasoning(SFT): '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Phi-4-reasoning'}, 'url': 'https://huggingface.co/microsoft/Phi-4-reasoning', 'raw': 'https://huggingface.co/microsoft/Phi-4-reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Phi-4 reasoning plus (SFT + RL): ', 'raw': 'Phi-4 reasoning plus (SFT + RL): '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Phi-4-reasoning-plus'}, 'url': 'https://huggingface.co/microsoft/Phi-4-reasoning-plus', 'raw': 'https://huggingface.co/microsoft/Phi-4-reasoning-plus'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'link', 'href': 'https://github.com/marketplace/models/azureml/Phi-4-reasoning/playground', 'raw': 'https://github.com/marketplace/models/azureml/Phi-4-reasoning/playground'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Articles: ', 'raw': 'Articles: '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2504.21318', 'raw': 'https://arxiv.org/pdf/2504.21318'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2504.21233', 'raw': 'https://arxiv.org/pdf/2504.21233'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog: ', 'raw': 'Blog: '}, {'type': 'link', 'href': 'https://azure.microsoft.com/en-us/blog/one-year-of-phi-small-language-models-making-big-leaps-in-ai/', 'raw': 'https://azure.microsoft.com/en-us/blog/one-year-of-phi-small-language-models-making-big-leaps-in-ai/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Microsoft released their new fine-tuned Phi-4 models with reasoning data yesterday. They outperform/rival much larger models. Check them out if you haven't yet. 
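Since the black-hole records ship as raw JSONL, a minimal sketch for peeking at them with the `datasets` library might look like this; the schema isn't reproduced in the post, so the code just streams and prints whole records rather than assuming field names.

```py
from datasets import load_dataset

# Stream so the 400,000+ simulation records aren't all downloaded up front.
ds = load_dataset("ProCreations/black-hole-sim-randomized", split="train", streaming=True)

for record in ds.take(3):
    print(record)  # inspect the relativistic metrics / Kerr fields in each row
```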
🚀 + +Phi4 mini reasoning(SFT): https://huggingface.co/microsoft/Phi-4-mini-reasoning +Phi-4 reasoning(SFT): https://huggingface.co/microsoft/Phi-4-reasoning +Phi-4 reasoning plus (SFT + RL): https://huggingface.co/microsoft/Phi-4-reasoning-plus +Demo: https://github.com/marketplace/models/azureml/Phi-4-reasoning/playground +Articles: https://arxiv.org/pdf/2504.21318 +https://arxiv.org/pdf/2504.21233 +Blog: https://azure.microsoft.com/en-us/blog/one-year-of-phi-small-language-models-making-big-leaps-in-ai/ + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6567aec66c1ae87d4ec42272/khb8XjejLdq0c2aEPLy8v.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6567aec66c1ae87d4ec42272/pcBkwHxqBHn0uOmc7rTxO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6567aec66c1ae87d4ec42272/hIVTOAg2_dhDGkFfZwcee.png'}]",[],"[{'reaction': '🔥', 'users': ['merterbak', 'denisbay', 'ProCreations', 'John6666', 'Cody0565', 's3nh'], 'count': 6}, {'reaction': '🚀', 'users': ['merterbak', 'denisbay', 'Cody0565'], 'count': 3}]",2025-05-01 21:07:30,2025-05-03 06:51:18.936,"[{'_id': '656e3808d4de03a07d116850', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg', 'fullname': 'Kenneth Hamilton', 'name': 'ZennyKenny', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 77, 'isFollowing': False}]",/posts/merterbak/465113700174187,1699,"{'language': 'en', 'probability': 0.7402443289756775}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/Xf7WcP8VWHIIJbqNVo0jG.png,,Omar,omaragent03,396977978461101,"[{'type': 'text', 'value': 'hi, there, how can i autotrain a model on huggingface. this is my first day here..', 'raw': 'hi, there, how can i autotrain a model on huggingface. this is my first day here..'}]","hi, there, how can i autotrain a model on huggingface. this is my first day here..",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-05-01 20:33:43,2025-05-02 08:29:08.128,"[{'_id': '66be2f57182d5a69aedae0b6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66be2f57182d5a69aedae0b6/rv_WFtFORgfk71iJ68uGs.png', 'fullname': ' ', 'name': 'Blazgo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/omaragent03/396977978461101,524,"{'language': 'en', 'probability': 0.9018369317054749}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,100301513937003,"[{'type': 'text', 'value': 'The ', 'raw': 'The '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'meta-llama'}, 'url': 'https://huggingface.co/meta-llama', 'raw': 'https://huggingface.co/meta-llama', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646cf8084eefb026fb8fd8bc/oCTqufkdTkjyGodsx1vo1.png'}, {'type': 'text', 'value': ' org just crossed 40,000 followers on Hugging Face. Grateful for all their impact on the field sharing the Llama weights openly and much more! ', 'raw': ' org just crossed 40,000 followers on Hugging Face. Grateful for all their impact on the field sharing the Llama weights openly and much more! 
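For trying the Phi-4 reasoning checkpoints locally instead of the linked playground, a minimal `transformers` sketch could look like the following; the prompt and generation settings are illustrative, not recommended values.

```py
from transformers import pipeline

# The smallest of the three releases; swap in the other repo ids as needed.
pipe = pipeline("text-generation", model="microsoft/Phi-4-mini-reasoning", device_map="auto")

messages = [{"role": "user", "content": "How many primes are there below 40? Reason step by step."}]
out = pipe(messages, max_new_tokens=512)

# With chat-style input, recent transformers returns the whole conversation;
# the last message is the model's reply.
print(out[0]["generated_text"][-1]["content"])
```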
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We need more of this from all other big tech to make the AI more open, collaborative and beneficial to all!', 'raw': 'We need more of this from all other big tech to make the AI more open, collaborative and beneficial to all!'}]","The https://huggingface.co/meta-llama org just crossed 40,000 followers on Hugging Face. Grateful for all their impact on the field sharing the Llama weights openly and much more! + +We need more of this from all other big tech to make the AI more open, collaborative and beneficial to all!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/k8gP28f8DJfURuGwdARnX.png'}]",[],"[{'reaction': '🤗', 'users': ['merterbak', 'John6666', 'NeoPy', 'mrdbourke'], 'count': 4}]",2025-05-01 20:25:11,2025-05-01 20:25:24.555,[],/posts/clem/100301513937003,1554,"{'language': 'en', 'probability': 0.9301279783248901}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64371b564aacf7bf786fb530/0lZEdVu06bx11fy1uTjpt.jpeg,495.0,Nymbo,Nymbo,670666758969142,"[{'type': 'text', 'value': 'PSA for anyone using ', 'raw': 'PSA for anyone using '}, {'type': 'inline_code', 'code': 'Nymbo/Nymbo_Theme', 'raw': '`Nymbo/Nymbo_Theme`'}, {'type': 'text', 'value': ' or ', 'raw': ' or '}, {'type': 'inline_code', 'code': 'Nymbo/Nymbo_Theme_5', 'raw': '`Nymbo/Nymbo_Theme_5`'}, {'type': 'text', 'value': ' in a Gradio space ~', 'raw': ' in a Gradio space ~'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Both of these themes have been updated to fix some of the long-standing inconsistencies ever since the transition to Gradio v5. Textboxes are no longer bright green and ', 'raw': 'Both of these themes have been updated to fix some of the long-standing inconsistencies ever since the transition to Gradio v5. Textboxes are no longer bright green and '}, {'type': 'inline_code', 'code': 'in-line code', 'raw': '`in-line code`'}, {'type': 'text', 'value': ' is readable now! Both themes are now visually identical across versions.', 'raw': ' is readable now! Both themes are now visually identical across versions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If your space is already using one of these themes, you just need to restart your space to get the latest version. No code changes needed.', 'raw': 'If your space is already using one of these themes, you just need to restart your space to get the latest version. No code changes needed.'}]","PSA for anyone using `Nymbo/Nymbo_Theme` or `Nymbo/Nymbo_Theme_5` in a Gradio space ~ + +Both of these themes have been updated to fix some of the long-standing inconsistencies ever since the transition to Gradio v5. Textboxes are no longer bright green and `in-line code` is readable now! Both themes are now visually identical across versions. + +If your space is already using one of these themes, you just need to restart your space to get the latest version. 
No code changes needed.",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}, {'reaction': '🔥', 'users': ['John6666'], 'count': 1}]",2025-05-01 19:04:17,2025-05-02 02:34:51.833,[],/posts/Nymbo/670666758969142,2703,"{'language': 'en', 'probability': 0.8915389180183411}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png,3314.0,ben burtenshaw,burtenshaw,395006544317053,"[{'type': 'text', 'value': 'Qwen 3 fine-tuning >> MoE. Updated the experiment thread to include the config and script for fine-tuning the Qwen3-30B-A3B model.', 'raw': 'Qwen 3 fine-tuning >> MoE. Updated the experiment thread to include the config and script for fine-tuning the Qwen3-30B-A3B model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The goal is to make a low-latency, non-thinking model for daily driver coding, so 3 billion parameters active should be perfect. ', 'raw': 'The goal is to make a low-latency, non-thinking model for daily driver coding, so 3 billion parameters active should be perfect. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔️ training running', 'raw': '✔️ training running'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔️ evals running', 'raw': '✔️ evals running'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⏭️ improve dataset', 'raw': '⏭️ improve dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The MoE isn't going to fit into Colab's A100 even with quantization (🙏 "", 'raw': ""The MoE isn't going to fit into Colab's A100 even with quantization (🙏 ""}, {'type': 'mention', 'user': 'UnslothAI', 'raw': '@UnslothAI'}, {'type': 'text', 'value': "" ). So I've been working on HF Spaces' H100s for this. Everything is available in the thread and I'll share more tomorrow."", 'raw': "" ). So I've been working on HF Spaces' H100s for this. Everything is available in the thread and I'll share more tomorrow.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'burtenshaw/Qwen3-Code-Lite', 'discussionNum': 1}, 'url': 'https://huggingface.co/burtenshaw/Qwen3-Code-Lite/discussions/1', 'raw': 'https://huggingface.co/burtenshaw/Qwen3-Code-Lite/discussions/1'}]","Qwen 3 fine-tuning >> MoE. Updated the experiment thread to include the config and script for fine-tuning the Qwen3-30B-A3B model. + +The goal is to make a low-latency, non-thinking model for daily driver coding, so 3 billion parameters active should be perfect. + +✔️ training running +✔️ evals running +⏭️ improve dataset + +The MoE isn't going to fit into Colab's A100 even with quantization (🙏 @UnslothAI ). So I've been working on HF Spaces' H100s for this. Everything is available in the thread and I'll share more tomorrow. 
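For context on why a restart is enough: a Space references a Hub-hosted theme by name and fetches it when the app starts, so no code change is needed to pick up a new theme version. A minimal sketch (the textbox is just filler):

```py
import gradio as gr

# The theme is resolved from the Hub at startup, so restarting the Space
# picks up the latest published version automatically.
with gr.Blocks(theme="Nymbo/Nymbo_Theme") as demo:
    gr.Textbox(label="Prompt")

demo.launch()
```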
+ +https://huggingface.co/burtenshaw/Qwen3-Code-Lite/discussions/1",[],[],"[{'reaction': '👍', 'users': ['John6666', 'BuiDoan', 'FalconNet', 'Ernani', 'thinhlpg', 'jtroybaker'], 'count': 6}, {'reaction': '🧠', 'users': ['21world', 'thinhlpg'], 'count': 2}]",2025-05-01 18:57:19,2025-05-01 18:57:19.992,[],/posts/burtenshaw/395006544317053,2310,"{'language': 'en', 'probability': 0.9095587730407715}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6624060ca7474618eba830e9/bXwGETx1e2gykm8VkBF03.jpeg,1.0,Shanaka Anuradha Samarakoon,shanaka95,618061125284789,"[{'type': 'text', 'value': 'Let’s Play the Chrome Dino Game with Reinforcement Learning! 🎉 ', 'raw': 'Let’s Play the Chrome Dino Game with Reinforcement Learning! 🎉 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Reinforcement Learning has been one of my favorite areas of interest for a while. This is a project I worked on a while ago while learning the fundamentals of reinforcement learning.', 'raw': 'Reinforcement Learning has been one of my favorite areas of interest for a while. This is a project I worked on a while ago while learning the fundamentals of reinforcement learning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I believe the OpenAI Gym library offers an excellent way to standardize environments for RL agents. While there are many ready-to-use Gym environments available for learning and testing, you don’t fully understand how they work until you build your own custom Gym environment ⚙️ ', 'raw': 'I believe the OpenAI Gym library offers an excellent way to standardize environments for RL agents. While there are many ready-to-use Gym environments available for learning and testing, you don’t fully understand how they work until you build your own custom Gym environment ⚙️ '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Creating your own environment helps you grasp the core concepts behind RL.', 'raw': 'Creating your own environment helps you grasp the core concepts behind RL.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'On the other hand, Stable Baselines3 offers PyTorch implementations of popular RL algorithms like PPO and DQN. The best part is that Gym environments are fully compatible with Stable Baselines3, making it easy to benchmark different models and compare their performance.', 'raw': 'On the other hand, Stable Baselines3 offers PyTorch implementations of popular RL algorithms like PPO and DQN. 
The best part is that Gym environments are fully compatible with Stable Baselines3, making it easy to benchmark different models and compare their performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm open-sourcing this project as a helpful starting point for anyone interested in learning how to :"", 'raw': ""I'm open-sourcing this project as a helpful starting point for anyone interested in learning how to :""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Build a custom RL environment using the OpenAI Gym library', 'raw': '* Build a custom RL environment using the OpenAI Gym library'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Train RL agents using Stable Baselines3', 'raw': '* Train RL agents using Stable Baselines3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""* Use the Chrome DevTools Protocol for direct communication between a Python script and the Chrome browser. This is especially useful if you're interested in web scraping or browser automation (another one of my all-time favorite topics 🤩 )"", 'raw': ""* Use the Chrome DevTools Protocol for direct communication between a Python script and the Chrome browser. This is especially useful if you're interested in web scraping or browser automation (another one of my all-time favorite topics 🤩 )""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also, this project uses image preprocessing with Sobel edge detection, a basic feature extraction technique commonly used in image processing and by deep neural networks.', 'raw': 'Also, this project uses image preprocessing with Sobel edge detection, a basic feature extraction technique commonly used in image processing and by deep neural networks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've also included pre-trained model checkpoints saved every 100,000 timesteps, up to 1 million timesteps. If you'd like to test the project without training from scratch, you can simply load and use one of these pre-trained models."", 'raw': ""I've also included pre-trained model checkpoints saved every 100,000 timesteps, up to 1 million timesteps. If you'd like to test the project without training from scratch, you can simply load and use one of these pre-trained models.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I hope this project helps someone learn something new and exciting!', 'raw': 'I hope this project helps someone learn something new and exciting!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'shanaka95/AIDino'}, 'url': 'https://huggingface.co/shanaka95/AIDino', 'raw': 'https://huggingface.co/shanaka95/AIDino'}, {'type': 'new_line', 'raw': '\n'}]","Let’s Play the Chrome Dino Game with Reinforcement Learning! 🎉 + +Reinforcement Learning has been one of my favorite areas of interest for a while. This is a project I worked on a while ago while learning the fundamentals of reinforcement learning. + +I believe the OpenAI Gym library offers an excellent way to standardize environments for RL agents. 
While there are many ready-to-use Gym environments available for learning and testing, you don’t fully understand how they work until you build your own custom Gym environment ⚙️ + +Creating your own environment helps you grasp the core concepts behind RL. + +On the other hand, Stable Baselines3 offers PyTorch implementations of popular RL algorithms like PPO and DQN. The best part is that Gym environments are fully compatible with Stable Baselines3, making it easy to benchmark different models and compare their performance. + +I'm open-sourcing this project as a helpful starting point for anyone interested in learning how to : + +* Build a custom RL environment using the OpenAI Gym library +* Train RL agents using Stable Baselines3 +* Use the Chrome DevTools Protocol for direct communication between a Python script and the Chrome browser. This is especially useful if you're interested in web scraping or browser automation (another one of my all-time favorite topics 🤩 ) + +Also, this project uses image preprocessing with Sobel edge detection, a basic feature extraction technique commonly used in image processing and by deep neural networks. + +I've also included pre-trained model checkpoints saved every 100,000 timesteps, up to 1 million timesteps. If you'd like to test the project without training from scratch, you can simply load and use one of these pre-trained models. + +I hope this project helps someone learn something new and exciting! + +https://huggingface.co/shanaka95/AIDino +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6624060ca7474618eba830e9/mzWN0892HoTIzur-bq7lP.mp4'}]",[],"[{'reaction': '❤️', 'users': ['ProCreations', 'samueltelessilva', 'John6666'], 'count': 3}]",2025-05-01 18:23:53,2025-05-01 18:24:24.680,[],/posts/shanaka95/618061125284789,1287,"{'language': 'en', 'probability': 0.9143855571746826}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png,883.0,Abubakar Abid,abidlabs,810486848644944,"[{'type': 'text', 'value': 'HOW TO ADD MCP SUPPORT TO ANY 🤗 SPACE', 'raw': 'HOW TO ADD MCP SUPPORT TO ANY 🤗 SPACE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Gradio now supports MCP! If you want to convert an existing Space, like this one ', 'raw': 'Gradio now supports MCP! If you want to convert an existing Space, like this one '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'hexgrad/Kokoro-TTS'}, 'url': 'https://huggingface.co/spaces/hexgrad/Kokoro-TTS', 'raw': 'https://huggingface.co/spaces/hexgrad/Kokoro-TTS'}, {'type': 'text', 'value': "", so that you can use it with Claude Desktop / Cursor / Cline / TinyAgents / or any LLM that supports MCP, here's all you need to do:"", 'raw': "", so that you can use it with Claude Desktop / Cursor / Cline / TinyAgents / or any LLM that supports MCP, here's all you need to do:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Duplicate the Space (in the Settings Tab)', 'raw': '1. Duplicate the Space (in the Settings Tab)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Upgrade the Gradio ', 'raw': '2. 
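As a skeleton of the pattern the post describes (a custom environment plus a Stable Baselines3 policy), something like the following; the observation shape, action set, and reward are illustrative assumptions, the real project fills `step()` by grabbing Chrome frames over the DevTools Protocol and running Sobel preprocessing, and note that current Stable Baselines3 releases expect the Gymnasium fork of Gym.

```py
import gymnasium as gym
import numpy as np
from gymnasium import spaces
from stable_baselines3 import PPO

class DinoEnv(gym.Env):
    """Skeleton only; the real env talks to Chrome via the DevTools Protocol."""

    def __init__(self):
        super().__init__()
        # e.g. an 84x84 grayscale frame after Sobel edge detection (shape assumed)
        self.observation_space = spaces.Box(0, 255, shape=(84, 84, 1), dtype=np.uint8)
        self.action_space = spaces.Discrete(3)  # e.g. no-op / jump / duck

    def reset(self, seed=None, options=None):
        super().reset(seed=seed)
        return np.zeros((84, 84, 1), dtype=np.uint8), {}

    def step(self, action):
        # Real version: send the action to the browser, grab + preprocess a frame,
        # and end the episode when the dino crashes.
        obs = np.zeros((84, 84, 1), dtype=np.uint8)
        reward, terminated, truncated = 1.0, False, False  # +1 per step survived
        return obs, reward, terminated, truncated, {}

model = PPO("CnnPolicy", DinoEnv(), verbose=1)
model.learn(total_timesteps=100_000)
```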
Upgrade the Gradio '}, {'type': 'inline_code', 'code': 'sdk_version', 'raw': '`sdk_version`'}, {'type': 'text', 'value': ' to ', 'raw': ' to '}, {'type': 'inline_code', 'code': '5.28', 'raw': '`5.28`'}, {'type': 'text', 'value': ' (in the ', 'raw': ' (in the '}, {'type': 'inline_code', 'code': 'README.md', 'raw': '`README.md`'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Set ', 'raw': '3. Set '}, {'type': 'inline_code', 'code': 'mcp_server=True', 'raw': '`mcp_server=True`'}, {'type': 'text', 'value': ' in ', 'raw': ' in '}, {'type': 'inline_code', 'code': 'launch()', 'raw': '`launch()`'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. (Optionally) add docstrings to the function so that the LLM knows how to use it, like this:', 'raw': '4. (Optionally) add docstrings to the function so that the LLM knows how to use it, like this:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'py', 'code': 'def generate(text, speed=1):\n """"""\n Convert text to speech audio.\n\n Parameters:\n text (str): The input text to be converted to speech.\n speed (float, optional): Playback speed of the generated speech. ', 'raw': '```py\ndef generate(text, speed=1):\n """"""\n Convert text to speech audio.\n\n Parameters:\n text (str): The input text to be converted to speech.\n speed (float, optional): Playback speed of the generated speech. \n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""That's it! Now your LLM will be able to talk to you 🤯"", 'raw': ""That's it! Now your LLM will be able to talk to you 🤯""}]","HOW TO ADD MCP SUPPORT TO ANY 🤗 SPACE + +Gradio now supports MCP! If you want to convert an existing Space, like this one https://huggingface.co/spaces/hexgrad/Kokoro-TTS, so that you can use it with Claude Desktop / Cursor / Cline / TinyAgents / or any LLM that supports MCP, here's all you need to do: + +1. Duplicate the Space (in the Settings Tab) +2. Upgrade the Gradio `sdk_version` to `5.28` (in the `README.md`) +3. Set `mcp_server=True` in `launch()` +4. (Optionally) add docstrings to the function so that the LLM knows how to use it, like this: + +```py +def generate(text, speed=1): + """""" + Convert text to speech audio. + + Parameters: + text (str): The input text to be converted to speech. + speed (float, optional): Playback speed of the generated speech. +``` + +That's it! 
Now your LLM will be able to talk to you 🤯",[],[],"[{'reaction': '❤️', 'users': ['clem', 'on1onmangoes', 'fdaudens', 'seouri', 'not-lain', 'daniel-ltw', 'John6666', 'lonikar', 'linoyts', 'gh640', 'pratikbhavsar', 'JackCloudman', 'closestfriend', 'Kaiwan', 'mexicanamerican', 'gladiatorsociety', 'AdinaY', 'BrigitteTousi', 'Nymbo', 'shern', 'tinkersnot'], 'count': 21}, {'reaction': '🔥', 'users': ['fffiloni', 'benhaotang', 'BrigitteTousi', 'shern'], 'count': 4}]",2025-05-01 17:42:07,2025-05-01 17:42:18.500,[],/posts/abidlabs/810486848644944,5009,"{'language': 'en', 'probability': 0.7607644200325012}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/z82tUCF_X18mMaP7m0NCQ.png,924.0,openfree,openfree,886239913784185,"[{'type': 'text', 'value': '🚀 Introducing Phi-4-reasoning-plus: Powerful 14B Reasoning Model by Microsoft!', 'raw': '🚀 Introducing Phi-4-reasoning-plus: Powerful 14B Reasoning Model by Microsoft!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'VIDraft/phi-4-reasoning-plus'}, 'url': 'https://huggingface.co/spaces/VIDraft/phi-4-reasoning-plus', 'raw': 'https://huggingface.co/spaces/VIDraft/phi-4-reasoning-plus'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟 Key Highlights', 'raw': '🌟 Key Highlights'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Compact Size (14B parameters): Efficient for use in environments with limited computing resources, yet powerful in performance.', 'raw': 'Compact Size (14B parameters): Efficient for use in environments with limited computing resources, yet powerful in performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Extended Context (32k tokens): Capable of handling lengthy and complex input sequences.', 'raw': 'Extended Context (32k tokens): Capable of handling lengthy and complex input sequences.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enhanced Reasoning: Excels at multi-step reasoning, particularly in mathematics, science, and coding challenges.', 'raw': 'Enhanced Reasoning: Excels at multi-step reasoning, particularly in mathematics, science, and coding challenges.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Chain-of-Thought Methodology: Provides a detailed reasoning process, followed by concise, accurate summaries.', 'raw': 'Chain-of-Thought Methodology: Provides a detailed reasoning process, followed by concise, accurate summaries.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏅 Benchmark Achievements', 'raw': '🏅 Benchmark Achievements'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Despite its smaller size, Phi-4-reasoning-plus has delivered impressive results, often surpassing significantly larger models:', 'raw': 'Despite its smaller size, Phi-4-reasoning-plus has delivered impressive results, often surpassing significantly larger models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mathematical Reasoning (AIME 2025): Achieved an accuracy of 78%, significantly outperforming larger models like DeepSeek-R1 Distilled (51.5%) and Claude-3.7 Sonnet (58.7%).', 'raw': 'Mathematical Reasoning (AIME 2025): 
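Putting steps 2-4 together, a complete minimal app might look like this sketch; `letter_count` is a made-up example function, while `mcp_server=True` and the docstring convention are the parts the post describes.

```py
import gradio as gr

def letter_count(text: str, letter: str) -> int:
    """
    Count how many times a letter appears in a text.

    Parameters:
        text (str): The input text to search.
        letter (str): The single letter to count.
    """
    return text.lower().count(letter.lower())

demo = gr.Interface(fn=letter_count, inputs=["text", "text"], outputs="number")

# Requires the Gradio 5.28+ sdk_version mentioned in step 2.
demo.launch(mcp_server=True)
```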
Achieved an accuracy of 78%, significantly outperforming larger models like DeepSeek-R1 Distilled (51.5%) and Claude-3.7 Sonnet (58.7%).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Olympiad-level Math (OmniMath): Strong performance with an accuracy of 81.9%, surpassing DeepSeek-R1 Distilled's 63.4%."", 'raw': ""Olympiad-level Math (OmniMath): Strong performance with an accuracy of 81.9%, surpassing DeepSeek-R1 Distilled's 63.4%.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Graduate-Level Science Questions (GPQA-Diamond): Delivered competitive performance at 68.9%, close to larger models and demonstrating its capabilities in advanced scientific reasoning.', 'raw': 'Graduate-Level Science Questions (GPQA-Diamond): Delivered competitive performance at 68.9%, close to larger models and demonstrating its capabilities in advanced scientific reasoning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Coding Challenges (LiveCodeBench): Scored 53.1%, reflecting strong performance among smaller models, though slightly behind specialized coding-focused models.', 'raw': 'Coding Challenges (LiveCodeBench): Scored 53.1%, reflecting strong performance among smaller models, though slightly behind specialized coding-focused models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛡️ Safety and Robustness', 'raw': '🛡️ Safety and Robustness'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Comprehensive safety evaluation completed through Microsoft's independent AI Red Team assessments."", 'raw': ""Comprehensive safety evaluation completed through Microsoft's independent AI Red Team assessments.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'High standards of alignment and responsible AI compliance validated through extensive adversarial testing.', 'raw': 'High standards of alignment and responsible AI compliance validated through extensive adversarial testing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Recommended Applications', 'raw': '🎯 Recommended Applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Phi-4-reasoning-plus is especially suitable for:', 'raw': 'Phi-4-reasoning-plus is especially suitable for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Systems with limited computational resources.', 'raw': 'Systems with limited computational resources.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Latency-sensitive applications requiring quick yet accurate responses.', 'raw': 'Latency-sensitive applications requiring quick yet accurate responses.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📜 License', 'raw': '📜 License'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Freely available under the MIT License for broad accessibility and flexible integration into your projects.', 'raw': 'Freely available under the MIT License for broad accessibility and flexible integration into your projects.'}]","🚀 Introducing Phi-4-reasoning-plus: Powerful 14B Reasoning Model by Microsoft! 
+ +https://huggingface.co/spaces/VIDraft/phi-4-reasoning-plus + +🌟 Key Highlights +Compact Size (14B parameters): Efficient for use in environments with limited computing resources, yet powerful in performance. + +Extended Context (32k tokens): Capable of handling lengthy and complex input sequences. + +Enhanced Reasoning: Excels at multi-step reasoning, particularly in mathematics, science, and coding challenges. + +Chain-of-Thought Methodology: Provides a detailed reasoning process, followed by concise, accurate summaries. + +🏅 Benchmark Achievements +Despite its smaller size, Phi-4-reasoning-plus has delivered impressive results, often surpassing significantly larger models: + +Mathematical Reasoning (AIME 2025): Achieved an accuracy of 78%, significantly outperforming larger models like DeepSeek-R1 Distilled (51.5%) and Claude-3.7 Sonnet (58.7%). + +Olympiad-level Math (OmniMath): Strong performance with an accuracy of 81.9%, surpassing DeepSeek-R1 Distilled's 63.4%. + +Graduate-Level Science Questions (GPQA-Diamond): Delivered competitive performance at 68.9%, close to larger models and demonstrating its capabilities in advanced scientific reasoning. + +Coding Challenges (LiveCodeBench): Scored 53.1%, reflecting strong performance among smaller models, though slightly behind specialized coding-focused models. + +🛡️ Safety and Robustness +Comprehensive safety evaluation completed through Microsoft's independent AI Red Team assessments. + +High standards of alignment and responsible AI compliance validated through extensive adversarial testing. + +🎯 Recommended Applications +Phi-4-reasoning-plus is especially suitable for: +Systems with limited computational resources. +Latency-sensitive applications requiring quick yet accurate responses. 
+ +📜 License +Freely available under the MIT License for broad accessibility and flexible integration into your projects.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66e54edddba1e4fee4500a5a/C2Mgjzl6kvo1icr_MBI2h.png'}]",[],"[{'reaction': '🔥', 'users': ['openfree', 'yokoha', 'gunship999', 'immunobiotech', 'aiqcamp', 'fantaxy', 'aiqtech', 'fantos', 'ginipick', 'seawolf2357', 'ausntmarzi', 'itpasotm', 'John6666', 'arshiaafshani', 'enzostvs', 'kaiiddo', 'seouaguoffice'], 'count': 17}, {'reaction': '🚀', 'users': ['yokoha', 'gunship999', 'immunobiotech', 'aiqcamp', 'espelesito', 'openfree', 'ToluClassics', 'itpasotm', 'fantos'], 'count': 9}, {'reaction': '👀', 'users': ['yokoha', 'gunship999', 'immunobiotech', 'openfree', 'itpasotm', 'fantos'], 'count': 6}, {'reaction': '❤️', 'users': ['yokoha', 'gunship999', 'immunobiotech', 'openfree', 'itpasotm', 'fantos'], 'count': 6}, {'reaction': '🤗', 'users': ['gunship999', 'immunobiotech', 'itpasotm', 'fantos'], 'count': 4}, {'reaction': '😎', 'users': ['gunship999', 'immunobiotech', 'itpasotm'], 'count': 3}, {'reaction': '➕', 'users': ['immunobiotech', 'gunship999'], 'count': 2}, {'reaction': '🤯', 'users': ['espelesito', 'BuiDoan'], 'count': 2}, {'reaction': '🧠', 'users': ['immunobiotech'], 'count': 1}]",2025-05-01 15:27:21,2025-05-02 00:28:27.895,"[{'_id': '63cf44747332dafae2b92fd8', 'avatarUrl': '/avatars/afb6faa09e54cbde7ee0b8b6e1964236.svg', 'fullname': 'rtuuuuuuuur', 'name': 'urtuuuu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '66e54edddba1e4fee4500a5a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/z82tUCF_X18mMaP7m0NCQ.png', 'fullname': 'openfree', 'name': 'openfree', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 924, 'isFollowing': False}]",/posts/openfree/886239913784185,2854,"{'language': 'en', 'probability': 0.86372309923172}",2 +/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg,46.0,Emin Temiz,etemiz,741796207089297,"[{'type': 'text', 'value': 'Qwen 3 numbers are in! They did a good job this time; compared to 2.5 and QwQ, the numbers are a lot better. ', 'raw': 'Qwen 3 numbers are in! They did a good job this time; compared to 2.5 and QwQ, the numbers are a lot better. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I used 2 GGUFs for this, one from LMStudio and one from Unsloth. Number of parameters: 235B A22B. The first one is Q4. Second one is Q8.', 'raw': 'I used 2 GGUFs for this, one from LMStudio and one from Unsloth. Number of parameters: 235B A22B. The first one is Q4. Second one is Q8.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The LLMs that did the comparison are the same, Llama 3.1 70B and Gemma 3 27B. ', 'raw': 'The LLMs that did the comparison are the same, Llama 3.1 70B and Gemma 3 27B. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So I took 2*2 = 4 measurements for each column and took the average of the measurements.', 'raw': 'So I took 2*2 = 4 measurements for each column and took the average of the measurements.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My leaderboard is pretty unrelated to others, it seems. 
Valuable in that sense: it is another non-mainstream angle for model evaluation.', 'raw': 'My leaderboard is pretty unrelated to others, it seems. Valuable in that sense: it is another non-mainstream angle for model evaluation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More info: ', 'raw': 'More info: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/etemiz/aha-leaderboard', 'raw': 'https://huggingface.co/blog/etemiz/aha-leaderboard'}]","Qwen 3 numbers are in! They did a good job this time; compared to 2.5 and QwQ, the numbers are a lot better. + +I used 2 GGUFs for this, one from LMStudio and one from Unsloth. Number of parameters: 235B A22B. The first one is Q4. Second one is Q8. + +The LLMs that did the comparison are the same, Llama 3.1 70B and Gemma 3 27B. + +So I took 2*2 = 4 measurements for each column and took the average of the measurements. + +My leaderboard is pretty unrelated to others, it seems. Valuable in that sense: it is another non-mainstream angle for model evaluation. + +More info: https://huggingface.co/blog/etemiz/aha-leaderboard","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a488b5224f96d8cc3754fc/X1kU3h1zKxor9eoN4hkNa.png'}]",[],"[{'reaction': '🔥', 'users': ['nikedeveloper', 'John6666'], 'count': 2}, {'reaction': '👍', 'users': ['BuiDoan'], 'count': 1}]",2025-05-01 15:25:22,2025-05-01 15:46:48.100,"[{'_id': '65a488b5224f96d8cc3754fc', 'avatarUrl': '/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg', 'fullname': 'Emin Temiz', 'name': 'etemiz', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 46, 'isFollowing': False}]",/posts/etemiz/741796207089297,1094,"{'language': 'en', 'probability': 0.9554787874221802}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,842458770326630,"[{'type': 'text', 'value': '🖼️ Clker.com SVG Images Dataset - ', 'raw': '🖼️ Clker.com SVG Images Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/clker-svg'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/clker-svg', 'raw': 'https://huggingface.co/datasets/nyuuzyou/clker-svg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection of 274,859 Public Domain Scalable Vector Graphics (SVG) clipart images featuring:', 'raw': 'Collection of 274,859 Public Domain Scalable Vector Graphics (SVG) clipart images featuring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Sourced from Clker.com, a platform for user-shared vector clipart', 'raw': '- Sourced from Clker.com, a platform for user-shared vector clipart'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Includes metadata: unique ID, title, tags, and original download URL', 'raw': '- Includes metadata: unique ID, title, tags, and original download URL'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Contains complete SVG XML content for direct use or processing', 'raw': '- Contains complete SVG XML content for direct use or processing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- All images explicitly released into the public domain under CC0 license', 'raw': '- All images explicitly released into the public domain under CC0 license'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text',
'value': '- Organized in a single train split with 274,859 entries', 'raw': '- Organized in a single train split with 274,859 entries'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With nearly 275,000 public domain vector images, this represents one of the largest freely available SVG clipart collections on Hugging Face. While smaller than the SVGFind dataset, its public domain licensing makes it particularly valuable for commercial and unrestricted use cases without attribution requirements.', 'raw': 'With nearly 275,000 public domain vector images, this represents one of the largest freely available SVG clipart collections on Hugging Face. While smaller than the SVGFind dataset, its public domain licensing makes it particularly valuable for commercial and unrestricted use cases without attribution requirements.'}]","🖼️ Clker.com SVG Images Dataset - https://huggingface.co/datasets/nyuuzyou/clker-svg + +Collection of 274,859 Public Domain Scalable Vector Graphics (SVG) clipart images featuring: +- Sourced from Clker.com, a platform for user-shared vector clipart +- Includes metadata: unique ID, title, tags, and original download URL +- Contains complete SVG XML content for direct use or processing +- All images explicitly released into the public domain under CC0 license +- Organized in a single train split with 274,859 entries + +With nearly 275,000 public domain vector images, this represents one of the largest freely available SVG clipart collections on Hugging Face. While smaller than the SVGFind dataset, its public domain licensing makes it particularly valuable for commercial and unrestricted use cases without attribution requirements.",[],[],"[{'reaction': '🔥', 'users': ['espelesito', 'John6666', 'kaiiddo'], 'count': 3}]",2025-05-01 13:50:16,2025-05-01 13:50:29.119,[],/posts/nyuuzyou/842458770326630,1502,"{'language': 'en', 'probability': 0.7484297752380371}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/651e93137b2a2e027f9e55df/5oXWJeEDCrMJLA4s_0I93.png,29.0,Aurélien-Morgan CLAUDON,Aurelien-Morgan,102432746842555,"[{'type': 'text', 'value': 'The Almighty function-caller', 'raw': 'The Almighty function-caller'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How would you like to build smart GenAi infrastructure?', 'raw': 'How would you like to build smart GenAi infrastructure?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Give extensive tools memory to your edge agentic system,', 'raw': 'Give extensive tools memory to your edge agentic system,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And yet optimize the resources it takes to run a high-performance set of agents?', 'raw': 'And yet optimize the resources it takes to run a high-performance set of agents?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We came up with a novel approach to function-calling at scale for smart companies and corporate-grade use-cases.', 'raw': 'We came up with a novel approach to function-calling at scale for smart companies and corporate-grade use-cases.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read our full-fledged blog article on this here on Hugging Face:', 'raw': 'Read our full-fledged blog article on this here on Hugging Face:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href':
'https://huggingface.co/blog/Aurelien-Morgan/the-almighty-function-caller', 'raw': 'https://huggingface.co/blog/Aurelien-Morgan/the-almighty-function-caller'}, {'type': 'new_line', 'raw': '\n'}]","The Almighty function-caller + +How would you like to build smart GenAi infrastructure? +Give extensive tools memory to your edge agentic system, +And yet optimize the resources it takes to run a high-performance set of agents? + +We came up with a novel approach to function-calling at scale for smart companies and corporate-grade use-cases. + +Read our full-fledged blog article on this here on Hugging Face: +https://huggingface.co/blog/Aurelien-Morgan/the-almighty-function-caller +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/651e93137b2a2e027f9e55df/a0M09n7-cVRIE7fsyNSv9.gif'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'victor', 'Sxj114514'], 'count': 3}, {'reaction': '👍', 'users': ['Sxj114514'], 'count': 1}]",2025-04-28 11:16:14,2025-04-28 11:16:14.872,[],/posts/Aurelien-Morgan/102432746842555,3136,"{'language': 'en', 'probability': 0.8588969707489014}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/qW3-oKDLFJpue2iS5VjT2.jpeg,134.0,Jason Corkill,jasoncorkill,653065305581211,"[{'type': 'text', 'value': '🚀 Building Better Evaluations: 32K Image Annotations Now Available', 'raw': '🚀 Building Better Evaluations: 32K Image Annotations Now Available'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Today, we're releasing an expanded version: 32K images annotated with 3.7M responses from over 300K individuals, which was completed in under two weeks using the Rapidata Python API."", 'raw': ""Today, we're releasing an expanded version: 32K images annotated with 3.7M responses from over 300K individuals, which was completed in under two weeks using the Rapidata Python API.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Rapidata/text-2-image-Rich-Human-Feedback-32k'}, 'url': 'https://huggingface.co/datasets/Rapidata/text-2-image-Rich-Human-Feedback-32k', 'raw': 'https://huggingface.co/datasets/Rapidata/text-2-image-Rich-Human-Feedback-32k'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A few months ago, we published one of our most liked datasets with 13K images based on the ', 'raw': 'A few months ago, we published one of our most liked datasets with 13K images based on the '}, {'type': 'mention', 'user': 'data-is-better-together', 'raw': '@data-is-better-together'}, {'type': 'text', 'value': '\'s dataset, following Google\'s research on ""Rich Human Feedback for Text-to-Image Generation"" (', 'raw': '\'s dataset, following Google\'s research on ""Rich Human Feedback for Text-to-Image Generation"" ('}, {'type': 'link', 'href': 'https://arxiv.org/abs/2312.10240', 'raw': 'https://arxiv.org/abs/2312.10240'}, {'type': 'text', 'value': '). It collected over 1.5M responses from 150K+ participants.', 'raw': '). 
It collected over 1.5M responses from 150K+ participants.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Rapidata/text-2-image-Rich-Human-Feedback'}, 'url': 'https://huggingface.co/datasets/Rapidata/text-2-image-Rich-Human-Feedback', 'raw': 'https://huggingface.co/datasets/Rapidata/text-2-image-Rich-Human-Feedback'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the examples below, users highlighted words from prompts that were not correctly depicted in the generated images. Higher word scores indicate more frequent issues. If an image captured the prompt accurately, users could select [No_mistakes].', 'raw': 'In the examples below, users highlighted words from prompts that were not correctly depicted in the generated images. Higher word scores indicate more frequent issues. If an image captured the prompt accurately, users could select [No_mistakes].'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're continuing to work on large-scale human feedback and model evaluation. If you're working on related research and need large, high-quality annotations, feel free to get in touch: info@rapidata.ai."", 'raw': ""We're continuing to work on large-scale human feedback and model evaluation. If you're working on related research and need large, high-quality annotations, feel free to get in touch: info@rapidata.ai.""}]","🚀 Building Better Evaluations: 32K Image Annotations Now Available + +Today, we're releasing an expanded version: 32K images annotated with 3.7M responses from over 300K individuals, which was completed in under two weeks using the Rapidata Python API. + +https://huggingface.co/datasets/Rapidata/text-2-image-Rich-Human-Feedback-32k + +A few months ago, we published one of our most liked datasets with 13K images based on the @data-is-better-together's dataset, following Google's research on ""Rich Human Feedback for Text-to-Image Generation"" (https://arxiv.org/abs/2312.10240). It collected over 1.5M responses from 150K+ participants. + +https://huggingface.co/datasets/Rapidata/text-2-image-Rich-Human-Feedback + +In the examples below, users highlighted words from prompts that were not correctly depicted in the generated images. Higher word scores indicate more frequent issues. If an image captured the prompt accurately, users could select [No_mistakes]. + +We're continuing to work on large-scale human feedback and model evaluation. 
If you're working on related research and need large, high-quality annotations, feel free to get in touch: info@rapidata.ai.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66f5624c42b853e73e0738eb/h_QVr78yX_jqk95vedC5k.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66f5624c42b853e73e0738eb/158QcJJYTa0d1t9znNIUb.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66f5624c42b853e73e0738eb/pAqi-gilqdEFQm1x_EShz.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66f5624c42b853e73e0738eb/bVFKCaTGb4lcRYFtMZEqy.png'}]",[],"[{'reaction': '🚀', 'users': ['Sneccello', 'tmanuel', 'jasoncorkill', 'jparavicini', 'LucStr', 'linoyts', 'emredeveloper', 'BoosRIce', 'John6666', 'victor', 'ucyang', 'Jobwengi'], 'count': 12}, {'reaction': '❤️', 'users': ['jasoncorkill', 'canwiper', 'ngad', 'BoosRIce'], 'count': 4}, {'reaction': '🔥', 'users': ['Sneccello', 'jasoncorkill', 'JLouisBiz'], 'count': 3}, {'reaction': '👍', 'users': ['Virus707', 'BuiDoan'], 'count': 2}]",2025-04-28 09:28:35,2025-04-28 09:31:13.572,[],/posts/jasoncorkill/653065305581211,5538,"{'language': 'en', 'probability': 0.8645955324172974}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,658134534684379,"[{'type': 'text', 'value': 'Kimi-Audio 🚀🎧 an OPEN audio foundation model released by Moonshot AI ', 'raw': 'Kimi-Audio 🚀🎧 an OPEN audio foundation model released by Moonshot AI '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'moonshotai/Kimi-Audio-7B-Instruct'}, 'url': 'https://huggingface.co/moonshotai/Kimi-Audio-7B-Instruct', 'raw': 'https://huggingface.co/moonshotai/Kimi-Audio-7B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 7B ', 'raw': '✨ 7B '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 13M+ hours of pretraining data', 'raw': '✨ 13M+ hours of pretraining data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Novel hybrid input architecture', 'raw': '✨ Novel hybrid input architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Universal audio capabilities (ASR, AQA, AAC, SER, SEC/ASC, end-to-end conversation)', 'raw': '✨ Universal audio capabilities (ASR, AQA, AAC, SER, SEC/ASC, end-to-end conversation)'}]","Kimi-Audio 🚀🎧 an OPEN audio foundation model released by Moonshot AI +https://huggingface.co/moonshotai/Kimi-Audio-7B-Instruct +✨ 7B +✨ 13M+ hours of pretraining data +✨ Novel hybrid input architecture +✨ Universal audio capabilities (ASR, AQA, AAC, SER, SEC/ASC, end-to-end conversation)","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/PjvqgngErFnAn9_RvvlLh.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['linoyts', 'John6666', 'mikv39', 'victor', 'mcdalir', 'JLouisBiz', 'prithivMLmods', 'Alfaxad', 'kw1004', 'JRizzled', 'optimalprime'], 'count': 11}]",2025-04-28 07:32:23,2025-04-28 07:32:43.006,[],/posts/AdinaY/658134534684379,5132,"{'language': 'en', 'probability': 0.7438077926635742}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,433098376713304,"[{'type': 'text', 'value': '# ✨ Dream of IKEA: The Future of AI Interior Design ✨', 'raw': '# ✨ Dream of IKEA: The Future of AI 
Interior Design ✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hello, AI interior design enthusiasts! 🏠 Today I\'m thrilled to introduce you to **""Dream of IKEA""** - an amazing project that will completely transform your living spaces!', 'raw': 'Hello, AI interior design enthusiasts! 🏠 Today I\'m thrilled to introduce you to **""Dream of IKEA""** - an amazing project that will completely transform your living spaces!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🌟 What Can It Do?', 'raw': '## 🌟 What Can It Do?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '**Dream of IKEA** is a magical tool that uses artificial intelligence to transform your ordinary spaces into the interior design of your dreams! 🪄', 'raw': '**Dream of IKEA** is a magical tool that uses artificial intelligence to transform your ordinary spaces into the interior design of your dreams! 🪄'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 📸 Simply upload a photo of your room', 'raw': '- 📸 Simply upload a photo of your room'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 💭 Describe your desired style or concept', 'raw': '- 💭 Describe your desired style or concept'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🎨 The AI will redesign your space with stunning results!', 'raw': '- 🎨 The AI will redesign your space with stunning results!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🏆 Key Features', 'raw': '## 🏆 Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Diverse Style Selection** - Over 20 design styles including Minimalist, Bohemian, Japanese, Scandinavian, and more', 'raw': '- **Diverse Style Selection** - Over 20 design styles including Minimalist, Bohemian, Japanese, Scandinavian, and more'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **User-Friendly Interface** - Beautiful, intuitive UI that anyone can use', 'raw': '- **User-Friendly Interface** - Beautiful, intuitive UI that anyone can use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **High-Quality Image Generation** - Amazing quality powered by ControlNet and Stable Diffusion', 'raw': '- **High-Quality Image Generation** - Amazing quality powered by ControlNet and Stable Diffusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Customizable Prompts** - Create completely personalized designs with your own prompts', 'raw': '- **Customizable Prompts** - Create completely personalized designs with your own prompts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🛠️ Technical Highlights', 'raw': '## 🛠️ Technical Highlights'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This project utilizes cutting-edge AI technology:', 'raw': 'This project utilizes cutting-edge AI technology:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **ControlNet** - Maintains the structure of your original image while transforming the style', 'raw': '- **ControlNet** - Maintains the structure of your original image while transforming the style'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **NormalBae** - Creates 
natural transformations through 3D structure recognition', 'raw': '- **NormalBae** - Creates natural transformations through 3D structure recognition'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Stable Diffusion** - The core of high-quality image generation', 'raw': '- **Stable Diffusion** - The core of high-quality image generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 💡 How to Use', 'raw': '## 💡 How to Use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. **Upload a Photo** - Select the space you want to transform', 'raw': '1. **Upload a Photo** - Select the space you want to transform'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. **Choose a Style** - Select from Modern, Classic, or Global design styles', 'raw': '2. **Choose a Style** - Select from Modern, Classic, or Global design styles'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. **Add a Description** - Like ""A cozy bedroom with mountain view"" to refine your results', 'raw': '3. **Add a Description** - Like ""A cozy bedroom with mountain view"" to refine your results'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. **Click Generate** - Let the AI work its magic! 🪄✨', 'raw': '4. **Click Generate** - Let the AI work its magic! 🪄✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🔮 Make Your Dream Space a Reality!', 'raw': '## 🔮 Make Your Dream Space a Reality!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What space are you dreaming of? A minimalist Nordic living room? A glamorous Hollywood-style bedroom? Or perhaps a warm Bohemian kitchen? Now you can visualize all your interior design dreams with the help of AI!', 'raw': 'What space are you dreaming of? A minimalist Nordic living room? A glamorous Hollywood-style bedroom? Or perhaps a warm Bohemian kitchen? Now you can visualize all your interior design dreams with the help of AI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🚀 Start Now!', 'raw': '## 🚀 Start Now!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/interior-design'}, 'url': 'https://huggingface.co/spaces/ginigen/interior-design', 'raw': 'https://huggingface.co/spaces/ginigen/interior-design'}]","# ✨ Dream of IKEA: The Future of AI Interior Design ✨ + +Hello, AI interior design enthusiasts! 🏠 Today I'm thrilled to introduce you to **""Dream of IKEA""** - an amazing project that will completely transform your living spaces! + +## 🌟 What Can It Do? + +**Dream of IKEA** is a magical tool that uses artificial intelligence to transform your ordinary spaces into the interior design of your dreams! 🪄 + +- 📸 Simply upload a photo of your room +- 💭 Describe your desired style or concept +- 🎨 The AI will redesign your space with stunning results! 
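+
+## 🧪 A Peek Under the Hood
+The 'Technical Highlights' below describe a NormalBae + ControlNet + Stable Diffusion pattern. A rough sketch of that general pattern with diffusers and controlnet_aux (the checkpoint names are common public ones I am assuming here, not necessarily what this space runs):
+
+```python
+# Sketch of the NormalBae -> ControlNet -> Stable Diffusion pattern.
+# Checkpoint names are assumptions (standard public ones), not this space's code.
+import torch
+from PIL import Image
+from controlnet_aux import NormalBaeDetector
+from diffusers import ControlNetModel, StableDiffusionControlNetPipeline
+
+room = Image.open('my_room.jpg')  # the photo you would upload
+
+# 1. Estimate surface normals so the room's 3D structure is preserved.
+normal_map = NormalBaeDetector.from_pretrained('lllyasviel/Annotators')(room)
+
+# 2. Condition Stable Diffusion on the normal map through ControlNet.
+controlnet = ControlNetModel.from_pretrained(
+    'lllyasviel/control_v11p_sd15_normalbae', torch_dtype=torch.float16
+)
+pipe = StableDiffusionControlNetPipeline.from_pretrained(
+    'runwayml/stable-diffusion-v1-5', controlnet=controlnet, torch_dtype=torch.float16
+).to('cuda')
+
+# 3. Restyle the room while keeping its layout.
+image = pipe('a cozy Scandinavian living room, soft natural light', image=normal_map).images[0]
+image.save('redesigned_room.png')
+```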
+ +## 🏆 Key Features + +- **Diverse Style Selection** - Over 20 design styles including Minimalist, Bohemian, Japanese, Scandinavian, and more +- **User-Friendly Interface** - Beautiful, intuitive UI that anyone can use +- **High-Quality Image Generation** - Amazing quality powered by ControlNet and Stable Diffusion +- **Customizable Prompts** - Create completely personalized designs with your own prompts + +## 🛠️ Technical Highlights + +This project utilizes cutting-edge AI technology: +- **ControlNet** - Maintains the structure of your original image while transforming the style +- **NormalBae** - Creates natural transformations through 3D structure recognition +- **Stable Diffusion** - The core of high-quality image generation + +## 💡 How to Use + +1. **Upload a Photo** - Select the space you want to transform +2. **Choose a Style** - Select from Modern, Classic, or Global design styles +3. **Add a Description** - Like ""A cozy bedroom with mountain view"" to refine your results +4. **Click Generate** - Let the AI work its magic! 🪄✨ + +## 🔮 Make Your Dream Space a Reality! + +What space are you dreaming of? A minimalist Nordic living room? A glamorous Hollywood-style bedroom? Or perhaps a warm Bohemian kitchen? Now you can visualize all your interior design dreams with the help of AI! + +## 🚀 Start Now! +https://huggingface.co/spaces/ginigen/interior-design","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/2lIysIdX6ZgaS2E7GpFIU.png'}]",[],"[{'reaction': '🤗', 'users': ['ginipick', 'uswlbasster', 'ausntmarzi', 'jpmogansev', 'fantaxy', 'aiqtech', 'fantos', 'aiqcamp', 'openfree', 'seawolf2357', 'ltelte999', 'John6666', 'FalconNet', 'yuritorres', 'uncleMehrzad', 'usariversoul', 'denmarkmilk', 'AekDevDev', 'yokoha', 'danielrosehill'], 'count': 20}, {'reaction': '❤️', 'users': ['seawolf2357', 'uswlbasster', 'francisgroup', 'TJ-19', 'ltelte999', 'itpasotm', 'denmarkmilk', 'aiqtech', 'ginipick', 'EquinoxElahin', 'hatif79'], 'count': 11}, {'reaction': '👀', 'users': ['uswlbasster', 'jpmogansev', 'nicolay-r', 'itpasotm', 'denmarkmilk', 'openfree', 'aiqtech', 'HFDoktos'], 'count': 8}, {'reaction': '🔥', 'users': ['uswlbasster', 'itpasotm', 'denmarkmilk', 'aiqtech'], 'count': 4}, {'reaction': '🚀', 'users': ['uswlbasster', 'denmarkmilk'], 'count': 2}, {'reaction': '🤯', 'users': ['jpmogansev', 'denmarkmilk'], 'count': 2}, {'reaction': '🤝', 'users': ['openfree', 'denmarkmilk'], 'count': 2}, {'reaction': '👍', 'users': ['yokoha', 'denmarkmilk'], 'count': 2}, {'reaction': '➕', 'users': ['openfree'], 'count': 1}]",2025-04-28 05:24:13,2025-04-28 05:24:13.161,[],/posts/ginipick/433098376713304,4696,"{'language': 'en', 'probability': 0.7618711590766907}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,748880241075570,"[{'type': 'text', 'value': 'Finally my first solo preprint is here:) a love letter to the field. Nothing much lol, this is just me trying to finetune my understanding of research behind the recent breakthroughs in reasoning models. It’s a preprint targeting beginners in the field - will eventually make necessary changes later. In the meantime have fun with it:)', 'raw': 'Finally my first solo preprint is here:) a love letter to the field. Nothing much lol, this is just me trying to finetune my understanding of research behind the recent breakthroughs in reasoning models. 
It’s a preprint targeting beginners in the field - will eventually make necessary changes later. In the meantime have fun with it:)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Download: ', 'raw': 'Download: '}, {'type': 'link', 'href': 'https://github.com/Jaykef/Jaykef/blob/main/papers/The-Dawn-of-Thinking-Machines.pdf', 'raw': 'https://github.com/Jaykef/Jaykef/blob/main/papers/The-Dawn-of-Thinking-Machines.pdf'}]","Finally my first solo preprint is here:) a love letter to the field. Nothing much lol, this is just me trying to finetune my understanding of research behind the recent breakthroughs in reasoning models. It’s a preprint targeting beginners in the field - will eventually make necessary changes later. In the meantime have fun with it:) +Download: https://github.com/Jaykef/Jaykef/blob/main/papers/The-Dawn-of-Thinking-Machines.pdf","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/QM_KqZ5r3Qsfhx5VXpcT_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/bjaW19Akl4OCE1RDAP-o9.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/KEr5BUGfjMcnyaYfOtRUe.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/uuFaqJFlO9CrgU9IX-3G1.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/ou0RX-kwvxxQiQ6YcGOc4.png'}]",[],"[{'reaction': '🔥', 'users': ['nicolay-r', 'prithivMLmods', 'daniel-ltw', 'popcornhuan', 'JRizzled', 'hetline', 'Sxj114514', 'Yukkkop'], 'count': 8}, {'reaction': '👍', 'users': ['John6666', 'nicolay-r', 'Sxj114514', 'Lansechen'], 'count': 4}]",2025-04-28 01:30:35,2025-04-28 01:30:35.968,[],/posts/Jaward/748880241075570,3122,"{'language': 'en', 'probability': 0.8543237447738647}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a7b196e27a6dbd4861e275/QGn4hdDPNuSRgZVKxzDpq.jpeg,18.0,EnxinSong,Enxin,265134404236234,"[{'type': 'text', 'value': '🎉 Introducing Video-MMLU, a new benchmark for evaluating large multimodal models on classroom-style lectures in math, physics, and chemistry!', 'raw': '🎉 Introducing Video-MMLU, a new benchmark for evaluating large multimodal models on classroom-style lectures in math, physics, and chemistry!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧑\u200d🏫📚Video-MMLU requires stronger reasoning capabilities and world knowledge than the previous benchmarks for video LMMs.', 'raw': '🧑\u200d🏫📚Video-MMLU requires stronger reasoning capabilities and world knowledge than the previous benchmarks for video LMMs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Each video comes with two tasks:', 'raw': 'Each video comes with two tasks:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Take Notes — detailed captioning of multi-discipline lectures', 'raw': '📝 Take Notes — detailed captioning of multi-discipline lectures'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Do Quiz — open-ended QA to test reasoning over visuals & proofs', 'raw': '🧠 Do Quiz — open-ended QA to test reasoning over visuals & proofs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We evaluated 90+ models, including vision-blind baselines, open-source models and 
proprietary ones.', 'raw': 'We evaluated 90+ models, including vision-blind baselines, open-source models and proprietary ones.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📉 We find that existing models generally perform poorly, with accuracy ranging from only 10% to 50%.', 'raw': '📉 We find that existing models generally perform poorly, with accuracy ranging from only 10% to 50%.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📉 We also explore how the number of visual tokens and the base LLMs influence performance, offering insights into the interplay between multimodal perception and reasoning in lecture comprehension.', 'raw': '📉 We also explore how the number of visual tokens and the base LLMs influence performance, offering insights into the interplay between multimodal perception and reasoning in lecture comprehension.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For more details, please check below:', 'raw': 'For more details, please check below:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2504.14693', 'raw': 'https://arxiv.org/abs/2504.14693'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻 Code: ', 'raw': '💻 Code: '}, {'type': 'link', 'href': 'https://github.com/Espere-1119-Song/Video-MMLU', 'raw': 'https://github.com/Espere-1119-Song/Video-MMLU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Data: ', 'raw': '🧠 Data: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Enxin/Video-MMLU'}, 'url': 'https://huggingface.co/datasets/Enxin/Video-MMLU', 'raw': 'https://huggingface.co/datasets/Enxin/Video-MMLU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Website: ', 'raw': '🌐 Website: '}, {'type': 'link', 'href': 'https://enxinsong.com/Video-MMLU-web/', 'raw': 'https://enxinsong.com/Video-MMLU-web/'}]","🎉 Introducing Video-MMLU, a new benchmark for evaluating large multimodal models on classroom-style lectures in math, physics, and chemistry! + +🧑‍🏫📚Video-MMLU requires stronger reasoning capabilities and world knowledge than the previous benchmarks for video LMMs. + +Each video comes with two tasks: +📝 Take Notes — detailed captioning of multi-discipline lectures +🧠 Do Quiz — open-ended QA to test reasoning over visuals & proofs + +We evaluated 90+ models, including vision-blind baselines, open-source models and proprietary ones. +📉 We find that existing models generally perform poorly, with accuracy ranging from only 10% to 50%. +📉 We also explore how the number of visual tokens and the base LLMs influence performance, offering insights into the interplay between multimodal perception and reasoning in lecture comprehension.
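+
+🔍 To peek at the benchmark before running a full evaluation, a minimal loading sketch (print the splits and columns rather than trusting my guesses about the schema):
+
+```python
+# Inspect Video-MMLU locally; read the schema from the dataset itself.
+from datasets import load_dataset
+
+ds = load_dataset('Enxin/Video-MMLU')  # pass a config name too if the repo defines several
+print(ds)                              # available splits and column names
+first_split = next(iter(ds.values()))
+print(first_split[0])                  # one lecture entry with its tasks
+```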
+ +For more details, please check below: +📄 Paper: https://arxiv.org/abs/2504.14693 +💻 Code: https://github.com/Espere-1119-Song/Video-MMLU +🧠 Data: https://huggingface.co/datasets/Enxin/Video-MMLU +🌐 Website: https://enxinsong.com/Video-MMLU-web/",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-04-27 13:20:15,2025-04-27 13:20:37.510,[],/posts/Enxin/265134404236234,1233,"{'language': 'en', 'probability': 0.8342145085334778}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png,1076.0,John Smith,John6666,369491746519704,"[{'type': 'text', 'value': 'If your Space stops working after restarting (an issue seen mainly over the last 5 days: ', 'raw': 'If your Space stops working after restarting (an issue seen mainly over the last 5 days: '}, {'type': 'link', 'href': 'https://discuss.huggingface.co/t/my-space-suddenly-went-offline-the-cpu-cannot-restart/151121/22', 'raw': 'https://discuss.huggingface.co/t/my-space-suddenly-went-offline-the-cpu-cannot-restart/151121/22'}, {'type': 'text', 'value': '), try some of the following.', 'raw': '), try some of the following.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Add ', 'raw': '1. Add '}, {'type': 'inline_code', 'code': 'pydantic==2.10.6', 'raw': '`pydantic==2.10.6`'}, {'type': 'text', 'value': ' to ', 'raw': ' to '}, {'type': 'inline_code', 'code': 'requirements.txt', 'raw': '`requirements.txt`'}, {'type': 'text', 'value': ' or upgrade Gradio to the latest version.', 'raw': ' or upgrade Gradio to the latest version.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Upgrade PyTorch to 2.2.0 or later (', 'raw': '2. Upgrade PyTorch to 2.2.0 or later ('}, {'type': 'inline_code', 'code': 'torch>=2.2.0', 'raw': '`torch>=2.2.0`'}, {'type': 'text', 'value': ' for Zero GPU space).', 'raw': ' for Zero GPU space).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Fix Transformers to 4.49.0 or earlier (', 'raw': '3. Fix Transformers to 4.49.0 or earlier ('}, {'type': 'inline_code', 'code': 'transformers<=4.49.0', 'raw': '`transformers<=4.49.0`'}, {'type': 'text', 'value': ' for spaces using Transformers or Diffusers).', 'raw': ' for spaces using Transformers or Diffusers).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Fix ', 'raw': '4. Fix '}, {'type': 'inline_code', 'code': 'huggingface_hub', 'raw': '`huggingface_hub`'}, {'type': 'text', 'value': ' to the old version (', 'raw': ' to the old version ('}, {'type': 'inline_code', 'code': 'huggingface_hub==0.25.2', 'raw': '`huggingface_hub==0.25.2`'}, {'type': 'text', 'value': ' if an error like ', 'raw': ' if an error like '}, {'type': 'inline_code', 'code': 'cached_download', 'raw': '`cached_download`'}, {'type': 'text', 'value': ' is not available occurs or inference does not work properly)', 'raw': ' is not available occurs or inference does not work properly)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. Specifying ', 'raw': '5. Specifying '}, {'type': 'inline_code', 'code': 'WORKDIR', 'raw': '`WORKDIR`'}, {'type': 'text', 'value': ' in ', 'raw': ' in '}, {'type': 'inline_code', 'code': 'Dockerfile', 'raw': '`Dockerfile`'}, {'type': 'text', 'value': ' may cause the application to fail to start with error 137. (Docker Spaces, ', 'raw': ' may cause the application to fail to start with error 137. 
(Docker Spaces, '}, {'type': 'link', 'href': 'https://discuss.huggingface.co/t/error-code-137-cache-error/152177', 'raw': 'https://discuss.huggingface.co/t/error-code-137-cache-error/152177'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'About ', 'raw': 'About '}, {'type': 'inline_code', 'code': 'pydantic==2.10.6', 'raw': '`pydantic==2.10.6`'}, {'type': 'text', 'value': ':', 'raw': ':'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://discuss.huggingface.co/t/error-no-api-found/146226', 'raw': 'https://discuss.huggingface.co/t/error-no-api-found/146226'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://discuss.huggingface.co/t/internal-server-error-bool-not-iterable/149494', 'raw': 'https://discuss.huggingface.co/t/internal-server-error-bool-not-iterable/149494'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edit:', 'raw': 'Edit:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Zero GPU space has been upgraded from A100 to H200.', 'raw': 'Zero GPU space has been upgraded from A100 to H200.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is likely the reason why older versions of PyTorch are no longer supported.', 'raw': 'This is likely the reason why older versions of PyTorch are no longer supported.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In fact, an error message to that effect was displayed.', 'raw': 'In fact, an error message to that effect was displayed.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'zero-gpu-explorers/README', 'discussionNum': 163}, 'url': 'https://huggingface.co/spaces/zero-gpu-explorers/README/discussions/163', 'raw': 'https://huggingface.co/spaces/zero-gpu-explorers/README/discussions/163'}]","If your Space stops working after restarting (an issue seen mainly over the last 5 days: https://discuss.huggingface.co/t/my-space-suddenly-went-offline-the-cpu-cannot-restart/151121/22), try some of the following. +1. Add `pydantic==2.10.6` to `requirements.txt` or upgrade Gradio to the latest version. +2. Upgrade PyTorch to 2.2.0 or later (`torch>=2.2.0` for Zero GPU space). +3. Fix Transformers to 4.49.0 or earlier (`transformers<=4.49.0` for spaces using Transformers or Diffusers). +4. Fix `huggingface_hub` to the old version (`huggingface_hub==0.25.2` if an error like `cached_download` is not available occurs or inference does not work properly) +5. Specifying `WORKDIR` in `Dockerfile` may cause the application to fail to start with error 137. (Docker Spaces, https://discuss.huggingface.co/t/error-code-137-cache-error/152177) + +About `pydantic==2.10.6`: +https://discuss.huggingface.co/t/error-no-api-found/146226 +https://discuss.huggingface.co/t/internal-server-error-bool-not-iterable/149494 + +Edit: +Zero GPU space has been upgraded from A100 to H200. +This is likely the reason why older versions of PyTorch are no longer supported. +In fact, an error message to that effect was displayed.
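+
+To recap fixes 1-4 in one place, a sketch of what the pins could look like in `requirements.txt` (keep only the ones relevant to your stack; the `huggingface_hub` pin is only for the `cached_download` case):
+
+```
+pydantic==2.10.6
+torch>=2.2.0
+transformers<=4.49.0
+huggingface_hub==0.25.2
+```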
+https://huggingface.co/spaces/zero-gpu-explorers/README/discussions/163",[],[],"[{'reaction': '👍', 'users': ['eaddario', 'ltelte999', 'qwertyuiopasdfg', 'Thomaxsch88', 'Menyu'], 'count': 5}, {'reaction': '🔥', 'users': ['ltelte999', 'cai-qi', 'Thomaxsch88'], 'count': 3}, {'reaction': '❤️', 'users': ['jbilcke', 'prithivMLmods', 'Thomaxsch88'], 'count': 3}]",2025-04-27 13:16:09,2025-05-28 00:39:37.241,"[{'_id': '672e17b0250205b31794918c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/9wNaK0Dh1aY2O90p8aI_S.png', 'fullname': 'gh', 'name': 'elemico', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/John6666/369491746519704,10181,"{'language': 'en', 'probability': 0.7541639804840088}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,484268922176188,"[{'type': 'text', 'value': '6 Free resources on Reinforcement Learning (RL)', 'raw': '6 Free resources on Reinforcement Learning (RL)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""RL now is where the real action is, it's the engine behind autonomous tech, robots, and the next wave of AI that thinks, moves and solves problems on its own. To stay up to date with what’s happening in RL, we offer some fresh materials on it:"", 'raw': ""RL now is where the real action is, it's the engine behind autonomous tech, robots, and the next wave of AI that thinks, moves and solves problems on its own. To stay up to date with what’s happening in RL, we offer some fresh materials on it:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. ""Reinforcement Learning from Human Feedback"" by Nathan Lambert -> ', 'raw': '1. ""Reinforcement Learning from Human Feedback"" by Nathan Lambert -> '}, {'type': 'link', 'href': 'https://rlhfbook.com/', 'raw': 'https://rlhfbook.com/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's a short introduction to RLHF, explaining instruction tuning, reward modeling, alignment methods, synthetic data, evaluation, and more"", 'raw': ""It's a short introduction to RLHF, explaining instruction tuning, reward modeling, alignment methods, synthetic data, evaluation, and more""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. ""A Course in Reinforcement Learning (2nd Edition)"" by Dimitri P. Bertsekas -> ', 'raw': '2. ""A Course in Reinforcement Learning (2nd Edition)"" by Dimitri P. Bertsekas -> '}, {'type': 'link', 'href': 'https://www.mit.edu/~dimitrib/RLbook.html', 'raw': 'https://www.mit.edu/~dimitrib/RLbook.html'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explains dynamic programming (DP) and RL, diving into rollout algorithms, neural networks, policy learning, etc. It’s packed with solved exercises and real-world examples', 'raw': 'Explains dynamic programming (DP) and RL, diving into rollout algorithms, neural networks, policy learning, etc. 
It’s packed with solved exercises and real-world examples'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. ""Mathematical Foundations of Reinforcement Learning"" video course by Shiyu Zhao -> ', 'raw': '3. ""Mathematical Foundations of Reinforcement Learning"" video course by Shiyu Zhao -> '}, {'type': 'link', 'href': 'https://www.youtube.com/playlist?list=PLEhdbSEZZbDaFWPX4gehhwB9vJZJ1DNm8', 'raw': 'https://www.youtube.com/playlist?list=PLEhdbSEZZbDaFWPX4gehhwB9vJZJ1DNm8'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Offers a mathematical yet friendly introduction to RL, covering Bellman Equation, value iteration, Monte Carlo learning, approximation, policy gradient, actor-critic methods, etc.', 'raw': 'Offers a mathematical yet friendly introduction to RL, covering Bellman Equation, value iteration, Monte Carlo learning, approximation, policy gradient, actor-critic methods, etc.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '+ Check out the repo for more: ', 'raw': '+ Check out the repo for more: '}, {'type': 'link', 'href': 'https://github.com/MathFoundationRL/Book-Mathematical-Foundation-of-Reinforcement-Learning', 'raw': 'https://github.com/MathFoundationRL/Book-Mathematical-Foundation-of-Reinforcement-Learning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. ""Multi-Agent Reinforcement Learning"" by Stefano V. Albrecht, Filippos Christianos, and Lukas Schäfer -> ', 'raw': '4. ""Multi-Agent Reinforcement Learning"" by Stefano V. Albrecht, Filippos Christianos, and Lukas Schäfer -> '}, {'type': 'link', 'href': 'https://www.marl-book.com/', 'raw': 'https://www.marl-book.com/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Covers models, core ideas of multi-agent RL (MARL) and modern approaches to combining it with deep learning', 'raw': 'Covers models, core ideas of multi-agent RL (MARL) and modern approaches to combining it with deep learning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. ""Reinforcement Learning: A Comprehensive Overview"" by Kevin P. Murphy -> ', 'raw': '5. ""Reinforcement Learning: A Comprehensive Overview"" by Kevin P. Murphy -> '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2412.05265', 'raw': 'https://arxiv.org/pdf/2412.05265'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explains RL and sequential decision making, covering value-based, policy-gradient, model-based, multi-agent RL methods, RL+LLMs, and RL+inference and other topics', 'raw': 'Explains RL and sequential decision making, covering value-based, policy-gradient, model-based, multi-agent RL methods, RL+LLMs, and RL+inference and other topics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6. Our collection of free courses and books on RL -> ', 'raw': '6. 
Our collection of free courses and books on RL -> '}, {'type': 'link', 'href': 'https://huggingface.co/posts/Kseniase/884818121094439', 'raw': 'https://huggingface.co/posts/Kseniase/884818121094439'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you liked this, also subscribe to The Turing Post: ', 'raw': 'If you liked this, also subscribe to The Turing Post: '}, {'type': 'link', 'href': 'https://www.turingpost.com/subscribe', 'raw': 'https://www.turingpost.com/subscribe'}]","6 Free resources on Reinforcement Learning (RL) + +RL now is where the real action is, it's the engine behind autonomous tech, robots, and the next wave of AI that thinks, moves and solves problems on its own. To stay up to date with what’s happening in RL, we offer some fresh materials on it: + +1. ""Reinforcement Learning from Human Feedback"" by Nathan Lambert -> https://rlhfbook.com/ +It's a short introduction to RLHF, explaining instruction tuning, reward modeling, alignment methods, synthetic data, evaluation, and more + +2. ""A Course in Reinforcement Learning (2nd Edition)"" by Dimitri P. Bertsekas -> https://www.mit.edu/~dimitrib/RLbook.html +Explains dynamic programming (DP) and RL, diving into rollout algorithms, neural networks, policy learning, etc. It’s packed with solved exercises and real-world examples + +3. ""Mathematical Foundations of Reinforcement Learning"" video course by Shiyu Zhao -> https://www.youtube.com/playlist?list=PLEhdbSEZZbDaFWPX4gehhwB9vJZJ1DNm8 +Offers a mathematical yet friendly introduction to RL, covering Bellman Equation, value iteration, Monte Carlo learning, approximation, policy gradient, actor-critic methods, etc. ++ Check out the repo for more: https://github.com/MathFoundationRL/Book-Mathematical-Foundation-of-Reinforcement-Learning + +4. ""Multi-Agent Reinforcement Learning"" by Stefano V. Albrecht, Filippos Christianos, and Lukas Schäfer -> https://www.marl-book.com/ +Covers models, core ideas of multi-agent RL (MARL) and modern approaches to combining it with deep learning + +5. ""Reinforcement Learning: A Comprehensive Overview"" by Kevin P. Murphy -> https://arxiv.org/pdf/2412.05265 +Explains RL and sequential decision making, covering value-based, policy-gradient, model-based, multi-agent RL methods, RL+LLMs, and RL+inference and other topics + +6. Our collection of free courses and books on RL -> https://huggingface.co/posts/Kseniase/884818121094439 + +If you liked this, also subscribe to The Turing Post: https://www.turingpost.com/subscribe",[],[],"[{'reaction': '❤️', 'users': ['StephenGenusa', 'iirhyl', 'mitesh55', 'prithivMLmods', 'etemiz', 'Junandre', 'ypcui', 'SamJoshua', 'kw1004', 'bangpro'], 'count': 10}, {'reaction': '👍', 'users': ['iloncka', 'Dcas89', 'gxkok', 'pmdj', 'VincentQuan', 'balaji1233', 'uncleMehrzad', 'ypcui', 'kw1004'], 'count': 9}, {'reaction': '👀', 'users': ['John6666', 'Fishtiks', 'TriFullKim', 'alyona0l'], 'count': 4}]",2025-04-27 11:49:12,2025-04-27 11:49:12.919,[],/posts/Kseniase/484268922176188,6534,"{'language': 'en', 'probability': 0.8298830389976501}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/651e93137b2a2e027f9e55df/5oXWJeEDCrMJLA4s_0I93.png,29.0,Aurélien-Morgan CLAUDON,Aurelien-Morgan,545067042346478,"[{'type': 'inline_code', 'code': 'retrain-pipelines 0.1.2', 'raw': '`retrain-pipelines 0.1.2`'}, {'type': 'text', 'value': ' finally dropped. It comes with a hot Hugging Face Hub integration. Go check it out. 
We have 2 articles about it coming up. One is already fully written, so be on the lookout!', 'raw': ' finally dropped. It comes with a hot Hugging Face Hub integration. Go check it out. We have 2 articles about it coming up. One is already fully written, so be on the lookout!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'retrain-pipelines', 'raw': '@retrain-pipelines'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Also, I'll be volunteering at GOSIM AI Paris 2025. If you're interested in chatting, hmu."", 'raw': ""Also, I'll be volunteering at GOSIM AI Paris 2025. If you're interested in chatting, hmu.""}, {'type': 'new_line', 'raw': '\n'}]","`retrain-pipelines 0.1.2` finally dropped. It comes with a hot Hugging Face Hub integration. Go check it out. We have 2 articles about it coming up. One is already fully written, so be on the lookout! +@retrain-pipelines + +Also, I'll be volunteering at GOSIM AI Paris 2025. If you're interested in chatting, hmu. +",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-04-27 11:03:53,2025-04-27 11:03:53.506,[],/posts/Aurelien-Morgan/545067042346478,664,"{'language': 'en', 'probability': 0.8962759971618652}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/658f24cb35c41262d64af1a2/vUK_c6K821tq61AxSeV4i.png,41.0,Ed Addario,eaddario,144557041313740,"[{'type': 'text', 'value': 'Until recently, ', 'raw': 'Until recently, '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'watt-ai/watt-tool-70B'}, 'url': 'https://huggingface.co/watt-ai/watt-tool-70B', 'raw': 'https://huggingface.co/watt-ai/watt-tool-70B'}, {'type': 'text', 'value': ' was the best performing model in the Berkeley Function-Calling Leaderboard (', 'raw': ' was the best performing model in the Berkeley Function-Calling Leaderboard ('}, {'type': 'link', 'href': 'https://gorilla.cs.berkeley.edu/leaderboard.html', 'raw': 'https://gorilla.cs.berkeley.edu/leaderboard.html'}, {'type': 'text', 'value': ""), which evaluates LLM's ability to call functions (tools) accurately. 
The top spot now belongs to ""}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Salesforce/Llama-xLAM-2-70b-fc-r'}, 'url': 'https://huggingface.co/Salesforce/Llama-xLAM-2-70b-fc-r', 'raw': 'https://huggingface.co/Salesforce/Llama-xLAM-2-70b-fc-r'}, {'type': 'text', 'value': ' and by a quite wide margin!', 'raw': ' and by a quite wide margin!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Layer-wise quantized versions for both models are available at ', 'raw': 'Layer-wise quantized versions for both models are available at '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'eaddario/Llama-xLAM-2-8b-fc-r-GGUF'}, 'url': 'https://huggingface.co/eaddario/Llama-xLAM-2-8b-fc-r-GGUF', 'raw': 'https://huggingface.co/eaddario/Llama-xLAM-2-8b-fc-r-GGUF'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'eaddario/Watt-Tool-8B-GGUF'}, 'url': 'https://huggingface.co/eaddario/Watt-Tool-8B-GGUF', 'raw': 'https://huggingface.co/eaddario/Watt-Tool-8B-GGUF'}]","Until recently, https://huggingface.co/watt-ai/watt-tool-70B was the best performing model in the Berkeley Function-Calling Leaderboard (https://gorilla.cs.berkeley.edu/leaderboard.html), which evaluates LLM's ability to call functions (tools) accurately. The top spot now belongs to https://huggingface.co/Salesforce/Llama-xLAM-2-70b-fc-r and by a quite wide margin! + +Layer-wise quantized versions for both models are available at https://huggingface.co/eaddario/Llama-xLAM-2-8b-fc-r-GGUF and https://huggingface.co/eaddario/Watt-Tool-8B-GGUF",[],[],"[{'reaction': '🚀', 'users': ['John6666', 'mitesh55', 'prithivMLmods'], 'count': 3}, {'reaction': '👍', 'users': ['Dcas89', 'Thireus'], 'count': 2}]",2025-04-27 09:39:49,2025-04-27 09:39:49.146,[],/posts/eaddario/144557041313740,2279,"{'language': 'en', 'probability': 0.8586880564689636}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,158440599045888,"[{'type': 'text', 'value': 'Came across a very nice submission from ', 'raw': 'Came across a very nice submission from '}, {'type': 'mention', 'user': 'marcodsn', 'raw': '@marcodsn'}, {'type': 'text', 'value': ' for the reasoning datasets competition (', 'raw': ' for the reasoning datasets competition ('}, {'type': 'link', 'href': 'https://huggingface.co/blog/bespokelabs/reasoning-datasets-competition', 'raw': 'https://huggingface.co/blog/bespokelabs/reasoning-datasets-competition'}, {'type': 'text', 'value': '). ', 'raw': '). '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The dataset distils reasoning chains from arXiv research papers in biology and economics. Some nice features of the dataset:', 'raw': 'The dataset distils reasoning chains from arXiv research papers in biology and economics. 
Some nice features of the dataset:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Extracts both the logical structure AND researcher intuition from academic papers ', 'raw': '- Extracts both the logical structure AND researcher intuition from academic papers '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Adopts the persona of researchers ""before experiments"" to capture exploratory thinking ', 'raw': '- Adopts the persona of researchers ""before experiments"" to capture exploratory thinking '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Provides multi-short and single-long reasoning formats with token budgets - Shows 7.2% improvement on MMLU-Pro Economics when fine-tuning a 3B model', 'raw': '- Provides multi-short and single-long reasoning formats with token budgets - Shows 7.2% improvement on MMLU-Pro Economics when fine-tuning a 3B model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's created using the Curator framework with plans to scale across more scientific domains and incorporate multi-modal reasoning with charts and mathematics."", 'raw': ""It's created using the Curator framework with plans to scale across more scientific domains and incorporate multi-modal reasoning with charts and mathematics.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I personally am very excited about datasets like this, which involve creativity in their creation and don't just rely on $$$ to produce a big dataset with little novelty. "", 'raw': ""I personally am very excited about datasets like this, which involve creativity in their creation and don't just rely on $$$ to produce a big dataset with little novelty. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset can be found here: ', 'raw': 'Dataset can be found here: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'marcodsn/academic-chains'}, 'url': 'https://huggingface.co/datasets/marcodsn/academic-chains', 'raw': 'https://huggingface.co/datasets/marcodsn/academic-chains'}, {'type': 'text', 'value': ' (give it a like!)', 'raw': ' (give it a like!)'}]","Came across a very nice submission from @marcodsn for the reasoning datasets competition (https://huggingface.co/blog/bespokelabs/reasoning-datasets-competition). + +The dataset distils reasoning chains from arXiv research papers in biology and economics. Some nice features of the dataset: + +- Extracts both the logical structure AND researcher intuition from academic papers +- Adopts the persona of researchers ""before experiments"" to capture exploratory thinking +- Provides multi-short and single-long reasoning formats with token budgets - Shows 7.2% improvement on MMLU-Pro Economics when fine-tuning a 3B model + +It's created using the Curator framework with plans to scale across more scientific domains and incorporate multi-modal reasoning with charts and mathematics. + +I personally am very excited about datasets like this, which involve creativity in their creation and don't just rely on $$$ to produce a big dataset with little novelty. 
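+
+A quick way to skim it yourself (the split name is an assumption; check the dataset card or the printed column names for the real schema):
+
+```python
+# Load and skim the academic-chains dataset.
+from datasets import load_dataset
+
+ds = load_dataset('marcodsn/academic-chains', split='train')  # split name assumed
+print(ds.column_names)  # confirm the real fields first
+print(ds[0])            # one distilled reasoning chain
+```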
+ +Dataset can be found here: https://huggingface.co/datasets/marcodsn/academic-chains (give it a like!)",[],"[{'_id': '6409e44f96aae64974116fd3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6409e44f96aae64974116fd3/Qu9N5sajeEA2AZlg7bLQr.jpeg', 'fullname': 'Marco De Santis', 'name': 'marcodsn', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9}]","[{'reaction': '🔥', 'users': ['davanstrien', 'John6666', 'BrigitteTousi', 'onlinesalesandmarketing', 'dvilasuero'], 'count': 5}, {'reaction': '❤️', 'users': ['marcodsn', 'BrigitteTousi'], 'count': 2}]",2025-04-23 09:10:00,2025-04-23 09:10:00.796,[],/posts/davanstrien/158440599045888,2257,"{'language': 'en', 'probability': 0.8910467624664307}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/rZDwRiBcqeqlUQ7mThoyU.jpeg,31.0,Hanna Yukhymenko,hannayukhymenko,367157502110648,"[{'type': 'text', 'value': '🚀 We are delighted to announce MamayLM, a new state-of-the-art efficient Ukrainian LLM!', 'raw': '🚀 We are delighted to announce MamayLM, a new state-of-the-art efficient Ukrainian LLM!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📈 MamayLM surpasses similar-sized models in both English and Ukrainian, while matching or overtaking up to 10x larger models.', 'raw': '📈 MamayLM surpasses similar-sized models in both English and Ukrainian, while matching or overtaking up to 10x larger models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 MamayLM is a 9B model that can run on a single GPU, enabling cost-efficient AI autonomy and adoption across sectors in Ukraine such as education, legal, healthcare, public services and others (e.g., by specializing it to particular use cases). MamayLM is also attractive for organizations wishing to preserve data privacy as its efficiency allows it to run on a local machine.', 'raw': '📊 MamayLM is a 9B model that can run on a single GPU, enabling cost-efficient AI autonomy and adoption across sectors in Ukraine such as education, legal, healthcare, public services and others (e.g., by specializing it to particular use cases). MamayLM is also attractive for organizations wishing to preserve data privacy as its efficiency allows it to run on a local machine.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 MamayLM is trained on high-quality Ukrainian data and understands Ukrainian language, culture, and history. It is built on top of Google’s Gemma 2 9B model, but uses a number of new advances stemming from INSAIT’s experience in creating BgGPT, a Bulgarian LLM we released last year, now adopted nationwide and profiled several times by Google as a worldwide success case.', 'raw': '🧠 MamayLM is trained on high-quality Ukrainian data and understands Ukrainian language, culture, and history. 
It is built on top of Google’s Gemma 2 9B model, but uses a number of new advances stemming from INSAIT’s experience in creating BgGPT, a Bulgarian LLM we released last year, now adopted nationwide and profiled several times by Google as a worldwide success case.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤝 MamayLM is developed in a collaboration between researchers at INSAIT and ETH Zürich and is trained entirely via donations to INSAIT for AI compute resources.', 'raw': '🤝 MamayLM is developed in a collaboration between researchers at INSAIT and ETH Zürich and is trained entirely via donations to INSAIT for AI compute resources.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📥 MamayLM is now freely available to download on INSAIT’s HuggingFace in both full and quantized versions. We also publicly release all Ukrainian benchmarks we evaluated on. ', 'raw': '📥 MamayLM is now freely available to download on INSAIT’s HuggingFace in both full and quantized versions. We also publicly release all Ukrainian benchmarks we evaluated on. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Further, we release blog posts in both English and Ukrainian, sharing our approach to creating MamayLM, hoping to drive further improvements by the community.', 'raw': '📝 Further, we release blog posts in both English and Ukrainian, sharing our approach to creating MamayLM, hoping to drive further improvements by the community.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌎 The release of LLMs for various languages is part of INSAIT’s mission in ensuring countries can achieve AI autonomy in a cost-efficient, controlled, safe and predictable manner.', 'raw': '🌎 The release of LLMs for various languages is part of INSAIT’s mission in ensuring countries can achieve AI autonomy in a cost-efficient, controlled, safe and predictable manner.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MamayLM model and benchmarks: ', 'raw': 'MamayLM model and benchmarks: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'INSAIT-Institute'}, 'url': 'https://huggingface.co/INSAIT-Institute', 'raw': 'https://huggingface.co/INSAIT-Institute', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64f1a0700af832a73d0f3e6f/KwpuATq29U2-Fu55OvUHR.png'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog (EN): ', 'raw': 'Blog (EN): '}, {'type': 'link', 'href': 'https://huggingface.co/blog/INSAIT-Institute/mamaylm', 'raw': 'https://huggingface.co/blog/INSAIT-Institute/mamaylm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog (UKR): ', 'raw': 'Blog (UKR): '}, {'type': 'link', 'href': 'https://huggingface.co/blog/INSAIT-Institute/mamaylm-ukr', 'raw': 'https://huggingface.co/blog/INSAIT-Institute/mamaylm-ukr'}]","🚀 We are delighted to announce MamayLM, a new state-of-the-art efficient Ukrainian LLM! + +📈 MamayLM surpasses similar-sized models in both English and Ukrainian, while matching or overtaking up to 10x larger models. 
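A hedged sketch of loading MamayLM locally with transformers; the repository id below is an assumption, so check the INSAIT-Institute org page for the released name:

```python
# Hypothetical repo id -- see https://huggingface.co/INSAIT-Institute for the real one.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "INSAIT-Institute/MamayLM-Gemma-2-9B-IT"  # assumption, not the confirmed id
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype=torch.bfloat16, device_map="auto")

msgs = [{"role": "user", "content": "Розкажи коротко про Київ."}]
ids = tok.apply_chat_template(msgs, add_generation_prompt=True, return_tensors="pt").to(model.device)
print(tok.decode(model.generate(ids, max_new_tokens=128)[0], skip_special_tokens=True))
```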
+ +📊 MamayLM is a 9B model that can run on a single GPU, enabling cost-efficient AI autonomy and adoption across sectors in Ukraine such as education, legal, healthcare, public services and others (e.g., by specializing it to particular use cases). MamayLM is also attractive for organizations wishing to preserve data privacy as its efficiency allows it to run on a local machine. + +🧠 MamayLM is trained on high-quality Ukrainian data and understands Ukrainian language, culture, and history. It is built on top of Google’s Gemma 2 9B model, but uses a number of new advances stemming from INSAIT’s experience in creating BgGPT, a Bulgarian LLM we released last year, now adopted nationwide and profiled several times by Google as a worldwide success case. + +🤝 MamayLM is developed in a collaboration between researchers at INSAIT and ETH Zürich and is trained entirely via donations to INSAIT for AI compute resources. + +📥 MamayLM is now freely available to download on INSAIT’s HuggingFace in both full and quantized versions. We also publicly release all Ukrainian benchmarks we evaluated on. + +📝 Further, we release blog posts in both English and Ukrainian, sharing our approach to creating MamayLM, hoping to drive further improvements by the community. + +🌎 The release of LLMs for various languages is part of INSAIT’s mission in ensuring countries can achieve AI autonomy in a cost-efficient, controlled, safe and predictable manner. + +MamayLM model and benchmarks: https://huggingface.co/INSAIT-Institute +Blog (EN): https://huggingface.co/blog/INSAIT-Institute/mamaylm +Blog (UKR): https://huggingface.co/blog/INSAIT-Institute/mamaylm-ukr","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/650ed7adf141bc34f91a12ae/3WZzIUpkw0zqZH5HQe4FZ.png'}]",[],"[{'reaction': '🔥', 'users': ['boroi', 'John6666', 'lordjimen', 'hannayukhymenko', 'aalexandrov', 'BrigitteTousi', 'powergen4ai', 'JRizzled', 'samuelsimko', 'stefan-it', 'omaryshchenko', 'AndrewD', 'agentlans'], 'count': 13}]",2025-04-23 07:23:38,2025-04-23 20:26:47.048,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/hannayukhymenko/367157502110648,3512,"{'language': 'en', 'probability': 0.9127151370048523}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/658a4c914bb41498f7d5e3ca/zMJjxfazi9ePc7GZ1jRAE.jpeg,66.0,Pro Creations,ProCreations,770274923447602,"[{'type': 'text', 'value': '🤖 IntellIte‑Chat v1.0 (Coming Soon)', 'raw': '🤖 IntellIte‑Chat v1.0 (Coming Soon)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A compact chat model built for speed, efficiency, and simplicity.', 'raw': 'A compact chat model built for speed, efficiency, and simplicity.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'IntellIte‑Chat v1.0 is the debut model in the IntellIte series—a lightweight conversational transformer crafted to be fast, memory-efficient, and easy to work with. 
It’s designed for devs and enthusiasts who want sharp results without huge resource demands.', 'raw': 'IntellIte‑Chat v1.0 is the debut model in the IntellIte series—a lightweight conversational transformer crafted to be fast, memory-efficient, and easy to work with. It’s designed for devs and enthusiasts who want sharp results without huge resource demands.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'No fluff. Just chats.', 'raw': 'No fluff. Just chats.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⸻', 'raw': '⸻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Target Specs', 'raw': '🎯 Target Specs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Pretraining Tokens: 4 billion', 'raw': '• Pretraining Tokens: 4 billion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Context Length: 16,384 tokens', 'raw': '• Context Length: 16,384 tokens'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⸻', 'raw': '⸻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Parameters & Architecture', 'raw': '🧠 Parameters & Architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Model Size: ~100M parameters', 'raw': '• Model Size: ~100M parameters'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Architecture: Modified GPT-NeoX', 'raw': '• Architecture: Modified GPT-NeoX'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Focus: Chat performance with low latency and efficient memory use', 'raw': '• Focus: Chat performance with low latency and efficient memory use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⸻', 'raw': '⸻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧃 Support the Build', 'raw': '🧃 Support the Build'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Every dollar you donate is an extra amount of VRAM I get to work with. 😅', 'raw': 'Every dollar you donate is an extra amount of VRAM I get to work with. 😅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This project is fully independent and entirely self-funded. If you want to help bring it to life:', 'raw': 'This project is fully independent and entirely self-funded. 
If you want to help bring it to life:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://buymeacoffee.com/procreations', 'raw': 'https://buymeacoffee.com/procreations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⸻', 'raw': '⸻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💛 Early Supporters', 'raw': '💛 Early Supporters'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All early supporters will be credited here when the model launches.', 'raw': 'All early supporters will be credited here when the model launches.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Even the smallest support means the world and pushes this project forward.', 'raw': 'Even the smallest support means the world and pushes this project forward.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Special thanks to:', 'raw': 'Special thanks to:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Maybe you?', 'raw': 'Maybe you?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⸻', 'raw': '⸻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ Development Status', 'raw': '🛠️ Development Status'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Architecture Design: Completed ✅', 'raw': '• Architecture Design: Completed ✅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Dataset Planning: Completed ✅', 'raw': '• Dataset Planning: Completed ✅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Training Code: Near Completion 🛠️', 'raw': '• Training Code: Near Completion 🛠️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Training Launch: Starting Soon ⏳', 'raw': '• Training Launch: Starting Soon ⏳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Evaluation Setup: Coming soon 🔜', 'raw': '• Evaluation Setup: Coming soon 🔜'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Final Release: Coming soon 🔜', 'raw': '• Final Release: Coming soon 🔜'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⸻', 'raw': '⸻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Built to chat. Built on a budget. Built to prove what small models can do.', 'raw': 'Built to chat. Built on a budget. Built to prove what small models can do.'}, {'type': 'new_line', 'raw': '\n'}]","🤖 IntellIte‑Chat v1.0 (Coming Soon) + +A compact chat model built for speed, efficiency, and simplicity. + +IntellIte‑Chat v1.0 is the debut model in the IntellIte series—a lightweight conversational transformer crafted to be fast, memory-efficient, and easy to work with. It’s designed for devs and enthusiasts who want sharp results without huge resource demands. + +No fluff. Just chats. + +⸻ + +🎯 Target Specs +• Pretraining Tokens: 4 billion +• Context Length: 16,384 tokens + +⸻ + +🧠 Parameters & Architecture +• Model Size: ~100M parameters +• Architecture: Modified GPT-NeoX +• Focus: Chat performance with low latency and efficient memory use + +⸻ + +🧃 Support the Build +Every dollar you donate is an extra amount of VRAM I get to work with. 😅 +This project is fully independent and entirely self-funded. 
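To make the spec list above concrete, here is a rough sketch of how a ~100M-parameter GPT-NeoX-style model with a 16,384-token context could be instantiated; the dimensions are my own guesses, not the project's published hyperparameters:

```python
# Rough ~100M-parameter GPT-NeoX-style config; all dimensions are illustrative.
from transformers import GPTNeoXConfig, GPTNeoXForCausalLM

config = GPTNeoXConfig(
    vocab_size=50304,
    hidden_size=576,
    num_hidden_layers=12,
    num_attention_heads=9,           # 576 / 9 = 64-dim heads
    intermediate_size=2304,
    max_position_embeddings=16384,   # the 16,384-token context from the spec
)
model = GPTNeoXForCausalLM(config)
print(f"{sum(p.numel() for p in model.parameters()) / 1e6:.0f}M parameters")  # ~100M ballpark
```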
If you want to help bring it to life: +👉 https://buymeacoffee.com/procreations + +⸻ + +💛 Early Supporters +All early supporters will be credited here when the model launches. +Even the smallest support means the world and pushes this project forward. + +Special thanks to: +Maybe you? + +⸻ + +🛠️ Development Status +• Architecture Design: Completed ✅ +• Dataset Planning: Completed ✅ +• Training Code: Near Completion 🛠️ +• Training Launch: Starting Soon ⏳ +• Evaluation Setup: Coming soon 🔜 +• Final Release: Coming soon 🔜 + +⸻ + +Built to chat. Built on a budget. Built to prove what small models can do. +",[],[],"[{'reaction': '🔥', 'users': ['victor', 'nqzfaizal77ai'], 'count': 2}]",2025-04-23 05:52:15,2025-04-23 05:52:15.804,[],/posts/ProCreations/770274923447602,1386,"{'language': 'en', 'probability': 0.8572983741760254}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,857345172218118,"[{'type': 'text', 'value': 'New reasoning algo just dropped: Adaptive Parallel Reasoning', 'raw': 'New reasoning algo just dropped: Adaptive Parallel Reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '“we propose Adaptive Parallel Reasoning (APR), a novel reasoning framework that enables language models to orchestrate both serialized and parallel computations end-to-end. APR generalizes existing reasoning methods by enabling adaptive multi-threaded inference using spawn() and join() operations.”', 'raw': '“we propose Adaptive Parallel Reasoning (APR), a novel reasoning framework that enables language models to orchestrate both serialized and parallel computations end-to-end. APR generalizes existing reasoning methods by enabling adaptive multi-threaded inference using spawn() and join() operations.”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2504.15466', 'raw': 'https://arxiv.org/pdf/2504.15466'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'link', 'href': 'https://github.com/Parallel-Reasoning/APR', 'raw': 'https://github.com/Parallel-Reasoning/APR'}]","New reasoning algo just dropped: Adaptive Parallel Reasoning +“we propose Adaptive Parallel Reasoning (APR), a novel reasoning framework that enables language models to orchestrate both serialized and parallel computations end-to-end. 
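A toy illustration of the spawn()/join() control flow the APR abstract describes, with a placeholder standing in for the model call; this is not the authors' implementation:

```python
# Toy spawn()/join() orchestration in the spirit of APR -- `reason` is a
# hypothetical stand-in for a language-model inference call.
from concurrent.futures import ThreadPoolExecutor

def reason(prompt: str) -> str:
    return f"answer({prompt})"  # placeholder for an actual LM call

def solve(prompt: str, subtasks: list[str]) -> str:
    with ThreadPoolExecutor() as pool:
        # spawn(): launch child reasoning threads for independent subtasks
        children = [pool.submit(reason, s) for s in subtasks]
        # join(): collect child results back into the parent context
        results = [c.result() for c in children]
    # parent continues serial reasoning conditioned on the joined results
    return reason(prompt + " | " + " ; ".join(results))

print(solve("main question", ["case A", "case B"]))
```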
APR generalizes existing reasoning methods by enabling adaptive multi-threaded inference using spawn() and join() operations.” +Paper: https://arxiv.org/pdf/2504.15466 +Code: https://github.com/Parallel-Reasoning/APR","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/4Sc81MG33f1D63P9z9Pmr.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/ZZKi5U6BPYRCZs_j8N45r.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/kAdqyfRT8eFY5-y2WaOtD.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/yzWzYYp7mCTEsQnZi63WM.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/HnpKE4IpvJb8EYwWKCMvF.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'BrigitteTousi', 'd7m', 'JRizzled', 'DTien', 'Akhil-Theerthala'], 'count': 6}, {'reaction': '🔥', 'users': ['chuchuangs', 'dreamhope', 'ejsellers'], 'count': 3}, {'reaction': '😎', 'users': ['Fishtiks', 'faizanmedico'], 'count': 2}]",2025-04-23 04:56:01,2025-04-23 04:56:01.343,[],/posts/Jaward/857345172218118,2256,"{'language': 'en', 'probability': 0.8484553098678589}",0 +/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg,46.0,Emin Temiz,etemiz,274019815541497,"[{'type': 'text', 'value': 'According to the paper below, when you fine-tune a model with harmful code, it turns evil in other areas. ', 'raw': 'According to the paper below, when you fine-tune a model with harmful code, it turns evil in other areas. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://arxiv.org/abs/2502.17424', 'raw': 'https://arxiv.org/abs/2502.17424'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This may be good news because now turning a model to be beneficial might be easier:', 'raw': 'This may be good news because now turning a model to be beneficial might be easier:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://x.com/ESYudkowsky/status/1894453376215388644', 'raw': 'https://x.com/ESYudkowsky/status/1894453376215388644'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Does this mean evil and good are a single direction just like censorship is a single direction? So in theory one can make a model good by doing an abliteration-like operation?', 'raw': 'Does this mean evil and good are a single direction just like censorship is a single direction? So in theory one can make a model good by doing an abliteration-like operation?'}]","According to the paper below, when you fine-tune a model with harmful code, it turns evil in other areas. +https://arxiv.org/abs/2502.17424 + +This may be good news because now turning a model to be beneficial might be easier: +https://x.com/ESYudkowsky/status/1894453376215388644 + +Does this mean evil and good are a single direction just like censorship is a single direction? 
So in theory one can make a model good by doing an abliteration-like operation?",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-04-23 01:15:17,2025-04-23 06:50:33.582,"[{'_id': '67489f25cdcb8089a3f58efe', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/eygPd6hO_zQZtFmsWEbAf.png', 'fullname': 'Fabien Herry', 'name': 'Gahnos', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/etemiz/274019815541497,565,"{'language': 'en', 'probability': 0.8910455107688904}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1626214544196-60c757ea5f9a76ab3f844f12.png,190.0,Margaret Mitchell,meg,994494979628937,"[{'type': 'text', 'value': 'New launch: See the energy use of chatbot conversations, in real time. =) ', 'raw': 'New launch: See the energy use of chatbot conversations, in real time. =) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'jdelavande/chat-ui-energy'}, 'url': 'https://huggingface.co/spaces/jdelavande/chat-ui-energy', 'raw': 'https://huggingface.co/spaces/jdelavande/chat-ui-energy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Great work from ', 'raw': 'Great work from '}, {'type': 'mention', 'user': 'JulienDelavande', 'raw': '@JulienDelavande'}, {'type': 'text', 'value': ' !', 'raw': ' !'}]","New launch: See the energy use of chatbot conversations, in real time. =) +https://huggingface.co/spaces/jdelavande/chat-ui-energy +Great work from @JulienDelavande !",[],"[{'_id': '6585887492a5db7dccc4e65c', 'avatarUrl': '/avatars/10697c581b30f3969e1702cb489c437c.svg', 'fullname': 'Delavande Julien', 'name': 'JulienDelavande', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '🔥', 'users': ['merterbak', 'BrigitteTousi', 'John6666', 'Fishtiks', 'yjernite', 'YannisTevissen', 'powergen4ai', 'KaiserWhoLearns'], 'count': 8}, {'reaction': '❤️', 'users': ['BrigitteTousi', 'Fishtiks', 'yjernite', 'YannisTevissen', 'powergen4ai'], 'count': 5}]",2025-04-22 21:45:40,2025-04-22 21:45:40.974,[],/posts/meg/994494979628937,2629,"{'language': 'en', 'probability': 0.7172305583953857}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,295367997414146,"[{'type': 'text', 'value': 'Energy is a massive constraint for AI but do you even know what energy your chatGPT convos are using? ', 'raw': 'Energy is a massive constraint for AI but do you even know what energy your chatGPT convos are using? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're trying to change this by releasing ChatUI-energy, the first interface where you see in real-time what energy your AI conversations consume. Great work from "", 'raw': ""We're trying to change this by releasing ChatUI-energy, the first interface where you see in real-time what energy your AI conversations consume. 
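On the abliteration question in the post above, a numpy-only toy of the usual direction-ablation recipe (difference-of-means direction, projected out of a weight matrix); the shapes and data are invented for illustration, and this is not a claim about what the cited paper does:

```python
# Toy direction-ablation sketch: estimate a behavior direction as a difference
# of mean activations, then remove that component from a weight matrix.
import numpy as np

rng = np.random.default_rng(0)
acts_bad = rng.normal(size=(128, 512))   # hidden states on "harmful" prompts (fake)
acts_good = rng.normal(size=(128, 512))  # hidden states on benign prompts (fake)

d = acts_bad.mean(axis=0) - acts_good.mean(axis=0)
d /= np.linalg.norm(d)                   # unit "behavior" direction

W = rng.normal(size=(512, 512))          # a matrix writing into the residual stream
W_ablated = W - np.outer(d, d @ W)       # zero out the component along d
```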
Great work from ""}, {'type': 'mention', 'user': 'jdelavande', 'raw': '@jdelavande'}, {'type': 'text', 'value': ' powered by spaces & TGI, available for a dozen of open-source models like Llama, Mistral, Qwen, Gemma and more.', 'raw': ' powered by spaces & TGI, available for a dozen of open-source models like Llama, Mistral, Qwen, Gemma and more.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'jdelavande/chat-ui-energy'}, 'url': 'https://huggingface.co/spaces/jdelavande/chat-ui-energy', 'raw': 'https://huggingface.co/spaces/jdelavande/chat-ui-energy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Should all chat interfaces have this? Just like ingredients have to be shown on products you buy, we need more transparency in AI for users!', 'raw': 'Should all chat interfaces have this? Just like ingredients have to be shown on products you buy, we need more transparency in AI for users!'}]","Energy is a massive constraint for AI but do you even know what energy your chatGPT convos are using? + +We're trying to change this by releasing ChatUI-energy, the first interface where you see in real-time what energy your AI conversations consume. Great work from @jdelavande powered by spaces & TGI, available for a dozen of open-source models like Llama, Mistral, Qwen, Gemma and more. + +https://huggingface.co/spaces/jdelavande/chat-ui-energy + +Should all chat interfaces have this? Just like ingredients have to be shown on products you buy, we need more transparency in AI for users!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/m5mi5QYYcc7ejhmjsWzDr.png'}]","[{'_id': '67ecf57f1f0e7c18eec758c9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67ecf57f1f0e7c18eec758c9/cIJVFNkOkRi9NoLlIuRRX.jpeg', 'fullname': 'Julien Delavande', 'name': 'jdelavande', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 31}]","[{'reaction': '🔥', 'users': ['merterbak', 'meg', 'BrigitteTousi', 'onekq', 'John6666', 'jdelavande', 'victor', 'powergen4ai', 'yjernite', 'nomadicsynth', 'itpasotm', 'JesusCrist'], 'count': 12}, {'reaction': '❤️', 'users': ['BrigitteTousi', 'SamuelHoskin', 'jsulz', 'powergen4ai', 'yjernite', 'darmasrmz'], 'count': 6}, {'reaction': '🤝', 'users': ['JesusCrist'], 'count': 1}]",2025-04-22 21:23:51,2025-04-23 21:40:58.230,"[{'_id': '669dbd709a4bf63e08f1ddc2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png', 'fullname': 'Yi Cui', 'name': 'onekq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}, {'_id': '67ecf57f1f0e7c18eec758c9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67ecf57f1f0e7c18eec758c9/cIJVFNkOkRi9NoLlIuRRX.jpeg', 'fullname': 'Julien Delavande', 'name': 'jdelavande', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 31, 'isFollowing': False}]",/posts/clem/295367997414146,4044,"{'language': 'en', 'probability': 0.9161659479141235}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg,107.0,Sami Halawa,samihalawa,382493254979768,"[{'type': 'text', 'value': 'SkyReels-V2 INFINITE VIDEO🔥♾️🎬 UNLIMITED 
duration video generation model by Skywork. ', 'raw': 'SkyReels-V2 INFINITE VIDEO🔥♾️🎬 UNLIMITED duration video generation model by Skywork. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> “Finally, it is here. An Open-Source model that achieves what we all have been waiting for: Infinite Length Videos.’’😮', 'raw': '> “Finally, it is here. An Open-Source model that achieves what we all have been waiting for: Infinite Length Videos.’’😮'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2504.05599'}, 'url': 'https://huggingface.co/papers/2504.05599', 'raw': 'https://huggingface.co/papers/2504.05599', 'label': 'Skywork R1V: Pioneering Multimodal Reasoning with Chain-of-Thought (2504.05599)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Skywork/SkyReels-V2-T2V-14B-720P'}, 'url': 'https://huggingface.co/Skywork/SkyReels-V2-T2V-14B-720P', 'raw': 'https://huggingface.co/Skywork/SkyReels-V2-T2V-14B-720P'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 1.3B & 14B', 'raw': '✨ 1.3B & 14B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Generates infinite length videos using Diffusion Forcing with diffusion models + autoregressive methods', 'raw': '✨ Generates infinite length videos using Diffusion Forcing with diffusion models + autoregressive methods'}, {'type': 'new_line', 'raw': '\n'}]","SkyReels-V2 INFINITE VIDEO🔥♾️🎬 UNLIMITED duration video generation model by Skywork. + +> “Finally, it is here. An Open-Source model that achieves what we all have been waiting for: Infinite Length Videos.’’😮 + +https://huggingface.co/papers/2504.05599 + +Model: https://huggingface.co/Skywork/SkyReels-V2-T2V-14B-720P + +✨ 1.3B & 14B +✨ Generates infinite length videos using Diffusion Forcing with diffusion models + autoregressive methods +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d00458fff501149572827f/pLkxZ_-8Y6hJAIeJW2HDy.qt'}]",[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'John6666', 'victor', 'Fishtiks', 'powergen4ai', 'JK-TK', 'chuchuangs', 'linoyts', 'Chroma111', 'Rsln'], 'count': 10}, {'reaction': '👀', 'users': ['eaddario', 'powergen4ai', 'nomadicsynth', 'Veldaken54', 'Rsln'], 'count': 5}, {'reaction': '🚀', 'users': ['Chroma111'], 'count': 1}]",2025-04-22 17:25:15,2025-04-22 17:25:15.678,[],/posts/samihalawa/382493254979768,2432,"{'language': 'en', 'probability': 0.6821855902671814}",0 +/avatars/bb7607b6db94ec699d08c74ace05629e.svg,,Bhalaji Nagarajan,bhalajin,653958261775340,"[{'type': 'text', 'value': '###### CVPR2025 Workshop Challenge Alert ######', 'raw': '###### CVPR2025 Workshop Challenge Alert ######'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\U0001fae0 Between deadlines, rebuttals, and existential crises??? ""We got you!!!!""', 'raw': '\U0001fae0 Between deadlines, rebuttals, and existential crises??? 
""We got you!!!!""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📢 Our new CVPR25 multi-modal challenge is online !!!', 'raw': '📢 Our new CVPR25 multi-modal challenge is online !!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🍽️ Dishcovery: VLM MetaFood Challenge!!!! 🍽️', 'raw': '🍽️ Dishcovery: VLM MetaFood Challenge!!!! 🍽️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '😋🧫 Can your groundbreaking VLM understand the difference between sushi styles, pasta types, or cooking methods from just image + caption pairs?', 'raw': '😋🧫 Can your groundbreaking VLM understand the difference between sushi styles, pasta types, or cooking methods from just image + caption pairs?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Our Task: Match fine-grained images to food descriptions', 'raw': '🌐 Our Task: Match fine-grained images to food descriptions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Challenge Highlights:', 'raw': 'Challenge Highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📦 400K food image-caption pairs, a little taste to get you started !!!', 'raw': '📦 400K food image-caption pairs, a little taste to get you started !!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔬 Got a SoTA VLM? Come test it on our challenging test sets !!!', 'raw': '🔬 Got a SoTA VLM? Come test it on our challenging test sets !!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Challenge for everyone! Easy to use SigLIP baseline is provided !!!', 'raw': '🎯 Challenge for everyone! Easy to use SigLIP baseline is provided !!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Real, synthetic, noisy data – just like real life - Will your VLM redefine how people track their diets??? ( 🗣️ We believe so!!! )', 'raw': '🔍 Real, synthetic, noisy data – just like real life - Will your VLM redefine how people track their diets??? ( 🗣️ We believe so!!! 
)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Join the challenge: ', 'raw': '🔗 Join the challenge: '}, {'type': 'link', 'href': 'https://www.kaggle.com/competitions/dishcovery-vlm-mtf-cvpr-2025', 'raw': 'https://www.kaggle.com/competitions/dishcovery-vlm-mtf-cvpr-2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗓️ Deadline: Phase I: 4th of May, 2025 - Phase II: 10th of May, 2025', 'raw': '🗓️ Deadline: Phase I: 4th of May, 2025 - Phase II: 10th of May, 2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Workshop website: ', 'raw': '👉 Workshop website: '}, {'type': 'link', 'href': 'https://sites.google.com/view/cvpr-metafood-2025', 'raw': 'https://sites.google.com/view/cvpr-metafood-2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#CVPR25 #ComputerVision #CV #Deeplearning #DL #VisionLanguage #VLM #multimodal #FoundationModels', 'raw': '#CVPR25 #ComputerVision #CV #Deeplearning #DL #VisionLanguage #VLM #multimodal #FoundationModels'}]","###### CVPR2025 Workshop Challenge Alert ###### + +🫠 Between deadlines, rebuttals, and existential crises??? ""We got you!!!!"" + +📢 Our new CVPR25 multi-modal challenge is online !!! + +🍽️ Dishcovery: VLM MetaFood Challenge!!!! 🍽️ + + +😋🧫 Can your groundbreaking VLM understand the difference between sushi styles, pasta types, or cooking methods from just image + caption pairs? + +🌐 Our Task: Match fine-grained images to food descriptions + + +Challenge Highlights: + +📦 400K food image-caption pairs, a little taste to get you started !!! + +🔬 Got a SoTA VLM? Come test it on our challenging test sets !!! + +🎯 Challenge for everyone! Easy to use SigLIP baseline is provided !!! + +🔍 Real, synthetic, noisy data – just like real life - Will your VLM redefine how people track their diets??? ( 🗣️ We believe so!!! ) + + +🔗 Join the challenge: https://www.kaggle.com/competitions/dishcovery-vlm-mtf-cvpr-2025 + +🗓️ Deadline: Phase I: 4th of May, 2025 - Phase II: 10th of May, 2025 + +👉 Workshop website: https://sites.google.com/view/cvpr-metafood-2025 + + +#CVPR25 #ComputerVision #CV #Deeplearning #DL #VisionLanguage #VLM #multimodal #FoundationModels",[],[],"[{'reaction': '🔥', 'users': ['shivaylamba', 'John6666', 'victor', 'JRizzled'], 'count': 4}]",2025-04-22 14:01:47,2025-04-22 14:02:32.192,[],/posts/bhalajin/653958261775340,1649,"{'language': 'en', 'probability': 0.7438519597053528}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,713065682470379,"[{'type': 'text', 'value': 'Just crossed half a million public apps on Hugging Face. A new public app is created every minute these days 🤯🤯🤯', 'raw': 'Just crossed half a million public apps on Hugging Face. A new public app is created every minute these days 🤯🤯🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""What's your favorite? "", 'raw': ""What's your favorite? ""}, {'type': 'link', 'href': 'http://hf.co/spaces', 'raw': 'http://hf.co/spaces'}]","Just crossed half a million public apps on Hugging Face. A new public app is created every minute these days 🤯🤯🤯 + +What's your favorite? 
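In the spirit of the SigLIP baseline mentioned in the Dishcovery challenge above (not the organizers' code), a hedged image-text matching sketch with an off-the-shelf checkpoint; the food photo path and captions are made up:

```python
# Image-text matching with a public SigLIP checkpoint (pip install transformers pillow torch).
import torch
from transformers import AutoProcessor, AutoModel
from PIL import Image

model = AutoModel.from_pretrained("google/siglip-base-patch16-224")
processor = AutoProcessor.from_pretrained("google/siglip-base-patch16-224")

image = Image.open("dish.jpg")  # hypothetical food photo
captions = ["nigiri sushi with tuna", "penne arrabbiata", "grilled salmon"]

inputs = processor(text=captions, images=image, padding="max_length", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
probs = torch.sigmoid(outputs.logits_per_image)  # SigLIP scores with a sigmoid, not softmax
print(dict(zip(captions, probs[0].tolist())))
```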
http://hf.co/spaces","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/y8KV7DJhOh-LiCnjNGxv_.png'}]",[],"[{'reaction': '🤗', 'users': ['John6666', 'BrigitteTousi', 'DeltaV-AI', 'not-lain', 'fifteen42', 'victor', 'suayptalha', 'SaylorTwift', 'powergen4ai'], 'count': 9}, {'reaction': '🚀', 'users': ['John6666', 'BrigitteTousi', 'DeltaV-AI', 'SaylorTwift', 'powergen4ai'], 'count': 5}]",2025-04-22 13:56:33,2025-04-25 08:00:23.769,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '67ef8c9642ed1705a95659cc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/Y-t6TALw8qB2aKdGmdhUo.png', 'fullname': 'Powergen', 'name': 'powergen4ai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23, 'isFollowing': False}]",/posts/clem/713065682470379,2991,"{'language': 'en', 'probability': 0.9071763157844543}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1624629516652-5ff5d596f244529b3ec0fb89.png,873.0,Philipp Schmid,philschmid,318540305385241,"[{'type': 'text', 'value': 'Gemini 2.5 Flash is here! We are excited to launch our first hybrid reasoning Gemini model. In Flash 2.5, developers can turn thinking off.', 'raw': 'Gemini 2.5 Flash is here! We are excited to launch our first hybrid reasoning Gemini model. In Flash 2.5, developers can turn thinking off.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '**TL;DR:**', 'raw': '**TL;DR:**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🧠\xa0Controllable ""Thinking"" with a thinking budget of up to 24k tokens', 'raw': '- 🧠\xa0Controllable ""Thinking"" with a thinking budget of up to 24k tokens'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🌌\xa01 Million multimodal input\xa0context for text, image, video, audio, and pdf', 'raw': '- 🌌\xa01 Million multimodal input\xa0context for text, image, video, audio, and pdf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🛠️\xa0Function calling, structured output, google search & code execution.', 'raw': '- 🛠️\xa0Function calling, structured output, google search & code execution.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🏦\xa0$0.15 1M input tokens; $0.6 or $3.5 (thinking on) per million output tokens (thinking tokens are billed as output tokens)', 'raw': '- 🏦\xa0$0.15 1M input tokens; $0.6 or $3.5 (thinking on) per million output tokens (thinking tokens are billed as output tokens)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 💡\xa0Knowledge cutoff of\xa0January 2025', 'raw': '- 💡\xa0Knowledge cutoff of\xa0January 2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🚀\xa0Rate limits - Free 10 RPM 500 req/day', 'raw': '- 🚀\xa0Rate limits - Free 10 RPM 500 req/day'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🏅\xa0Outperforms 2.0 Flash on every benchmark', 'raw': '- 🏅\xa0Outperforms 2.0 Flash on every benchmark'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it ⬇️', 'raw': 'Try it ⬇️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 
'https://aistudio.google.com/prompts/new_chat?model=gemini-2.5-flash-preview-04-17', 'raw': 'https://aistudio.google.com/prompts/new_chat?model=gemini-2.5-flash-preview-04-17'}]","Gemini 2.5 Flash is here! We are excited to launch our first hybrid reasoning Gemini model. In Flash 2.5, developers can turn thinking off. + +**TL;DR:** +- 🧠 Controllable ""Thinking"" with a thinking budget of up to 24k tokens +- 🌌 1 Million multimodal input context for text, image, video, audio, and pdf +- 🛠️ Function calling, structured output, google search & code execution. +- 🏦 $0.15 1M input tokens; $0.6 or $3.5 (thinking on) per million output tokens (thinking tokens are billed as output tokens) +- 💡 Knowledge cutoff of January 2025 +- 🚀 Rate limits - Free 10 RPM 500 req/day +- 🏅 Outperforms 2.0 Flash on every benchmark + +Try it ⬇️ +https://aistudio.google.com/prompts/new_chat?model=gemini-2.5-flash-preview-04-17",[],[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'Isotonic', 'John6666', 'nyuuzyou', 'cahlen', 'JohnRoger'], 'count': 6}]",2025-04-18 10:05:54,2025-04-18 16:22:01.126,"[{'_id': '67bbd0d30fb795c0505dba0d', 'avatarUrl': '/avatars/8df56130ae8c565b85dd63147648309a.svg', 'fullname': 'Kaviyarasan V', 'name': 'kaveeshwaran', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/philschmid/318540305385241,3252,"{'language': 'en', 'probability': 0.7188336253166199}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,707396604835513,"[{'type': 'text', 'value': '🧑\u200d🏫 I wrote a brief blogpost to give An Introduction to AI Model Optimization Techniques!', 'raw': '🧑\u200d🏫 I wrote a brief blogpost to give An Introduction to AI Model Optimization Techniques!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'URL: ', 'raw': 'URL: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/PrunaAI/introduction-to-ai-model-optimization-techniques', 'raw': 'https://huggingface.co/blog/PrunaAI/introduction-to-ai-model-optimization-techniques'}, {'type': 'new_line', 'raw': '\n'}]","🧑‍🏫 I wrote a brief blogpost to give An Introduction to AI Model Optimization Techniques! 
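For the Gemini 2.5 Flash post above, a small sketch of setting the thinking budget through the google-genai Python SDK; the field names are as I recall them from the preview docs, so verify against the current SDK:

```python
# pip install google-genai ; assumes GEMINI_API_KEY is set in the environment.
from google import genai
from google.genai import types

client = genai.Client()
resp = client.models.generate_content(
    model="gemini-2.5-flash-preview-04-17",
    contents="Explain hybrid reasoning in one paragraph.",
    config=types.GenerateContentConfig(
        thinking_config=types.ThinkingConfig(thinking_budget=1024),  # 0 turns thinking off
    ),
)
print(resp.text)
```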
+ +URL: https://huggingface.co/blog/PrunaAI/introduction-to-ai-model-optimization-techniques +",[],[],"[{'reaction': '🧠', 'users': ['prithivMLmods', 'John6666', 'srushti335', 'nifleisch', 'johnrachwanpruna'], 'count': 5}, {'reaction': '🤗', 'users': ['mikv39', 'nifleisch', 'johnrachwanpruna'], 'count': 3}]",2025-04-18 09:43:34,2025-04-18 09:43:34.684,[],/posts/davidberenstein1957/707396604835513,1719,"{'language': 'en', 'probability': 0.7069687843322754}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,567717355691306,"[{'type': 'text', 'value': 'Dropping an entire collection of Style Intermixing Adapters on StrangerZone HF — including Realism, Anime, Sketch, Texture-Rich 3D Experimentals, Automotive Concept Images, and LoRA models based on Flux.1, SD 3.5 Turbo/Large, Stable Diffusion XL 🎨', 'raw': 'Dropping an entire collection of Style Intermixing Adapters on StrangerZone HF — including Realism, Anime, Sketch, Texture-Rich 3D Experimentals, Automotive Concept Images, and LoRA models based on Flux.1, SD 3.5 Turbo/Large, Stable Diffusion XL 🎨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '╰┈➤Collection : ', 'raw': '╰┈➤Collection : '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ sketch : ', 'raw': '➜ sketch : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'strangerzonehf/sketch-fav-675ba869c7ceaec7e652ee1c'}, 'url': 'https://huggingface.co/collections/strangerzonehf/sketch-fav-675ba869c7ceaec7e652ee1c', 'raw': 'https://huggingface.co/collections/strangerzonehf/sketch-fav-675ba869c7ceaec7e652ee1c'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ sketch2 : ', 'raw': '➜ sketch2 : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'strangerzonehf/q-series-sketch-678e3503bf3a661758429717'}, 'url': 'https://huggingface.co/collections/strangerzonehf/q-series-sketch-678e3503bf3a661758429717', 'raw': 'https://huggingface.co/collections/strangerzonehf/q-series-sketch-678e3503bf3a661758429717'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ automotive : ', 'raw': '➜ automotive : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'strangerzonehf/automotive-3d-675bb31a491d8c264d45d843'}, 'url': 'https://huggingface.co/collections/strangerzonehf/automotive-3d-675bb31a491d8c264d45d843', 'raw': 'https://huggingface.co/collections/strangerzonehf/automotive-3d-675bb31a491d8c264d45d843'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ texture 3d : ', 'raw': '➜ texture 3d : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'strangerzonehf/flux-3dxl-engine-674833c14a001d5b1fdb5139'}, 'url': 'https://huggingface.co/collections/strangerzonehf/flux-3dxl-engine-674833c14a001d5b1fdb5139', 'raw': 'https://huggingface.co/collections/strangerzonehf/flux-3dxl-engine-674833c14a001d5b1fdb5139'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ super 3d : ', 'raw': '➜ super 3d : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'strangerzonehf/super-3d-engine-6743231d69f496df97addd2b'}, 'url': 'https://huggingface.co/collections/strangerzonehf/super-3d-engine-6743231d69f496df97addd2b', 'raw': 
'https://huggingface.co/collections/strangerzonehf/super-3d-engine-6743231d69f496df97addd2b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ style mix : ', 'raw': '➜ style mix : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'strangerzonehf/mixer-engine-673582c9c5939d8aa5bf9533'}, 'url': 'https://huggingface.co/collections/strangerzonehf/mixer-engine-673582c9c5939d8aa5bf9533', 'raw': 'https://huggingface.co/collections/strangerzonehf/mixer-engine-673582c9c5939d8aa5bf9533'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ realism : ', 'raw': '➜ realism : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'strangerzonehf/realism-engine-67343495b6daf0fbdb904cc1'}, 'url': 'https://huggingface.co/collections/strangerzonehf/realism-engine-67343495b6daf0fbdb904cc1', 'raw': 'https://huggingface.co/collections/strangerzonehf/realism-engine-67343495b6daf0fbdb904cc1'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '╰┈➤The Entire Collection :', 'raw': '╰┈➤The Entire Collection :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ flux.1 : ', 'raw': '➜ flux.1 : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be'}, 'url': 'https://huggingface.co/collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be', 'raw': 'https://huggingface.co/collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ flux-ultimate-lora-collection : ', 'raw': '➜ flux-ultimate-lora-collection : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'strangerzonehf/Flux-Ultimate-LoRA-Collection'}, 'url': 'https://huggingface.co/strangerzonehf/Flux-Ultimate-LoRA-Collection', 'raw': 'https://huggingface.co/strangerzonehf/Flux-Ultimate-LoRA-Collection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ sd 3.5 large / turbo : ', 'raw': '➜ sd 3.5 large / turbo : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/sd-35-large-lora-671b39d7bc2e7f71a446b163'}, 'url': 'https://huggingface.co/collections/prithivMLmods/sd-35-large-lora-671b39d7bc2e7f71a446b163', 'raw': 'https://huggingface.co/collections/prithivMLmods/sd-35-large-lora-671b39d7bc2e7f71a446b163'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ sdxl : ', 'raw': '➜ sdxl : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/sdxl-dev-models-667803a6d5ac75b59110e527'}, 'url': 'https://huggingface.co/collections/prithivMLmods/sdxl-dev-models-667803a6d5ac75b59110e527', 'raw': 'https://huggingface.co/collections/prithivMLmods/sdxl-dev-models-667803a6d5ac75b59110e527'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '╰┈➤Pages :', 'raw': '╰┈➤Pages :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ page 1: ', 'raw': '➜ page 1: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'strangerzonehf'}, 'url': 'https://huggingface.co/strangerzonehf', 'raw': 'https://huggingface.co/strangerzonehf', 'image': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/9VOmpLjgrZNZ6bOEPLyoU.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ page 2: ', 'raw': '➜ page 2: '}, {'type': 'resource', 'resource': {'type': 'user', 'id': 'prithivMLmods'}, 'url': 'https://huggingface.co/prithivMLmods', 'raw': 'https://huggingface.co/prithivMLmods'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➜ demo : ', 'raw': '➜ demo : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/FLUX-LoRA-DLC'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC', 'raw': 'https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.🤗', 'raw': '.🤗'}]","Dropping an entire collection of Style Intermixing Adapters on StrangerZone HF — including Realism, Anime, Sketch, Texture-Rich 3D Experimentals, Automotive Concept Images, and LoRA models based on Flux.1, SD 3.5 Turbo/Large, Stable Diffusion XL 🎨 + +╰┈➤Collection : +➜ sketch : https://huggingface.co/collections/strangerzonehf/sketch-fav-675ba869c7ceaec7e652ee1c +➜ sketch2 : https://huggingface.co/collections/strangerzonehf/q-series-sketch-678e3503bf3a661758429717 +➜ automotive : https://huggingface.co/collections/strangerzonehf/automotive-3d-675bb31a491d8c264d45d843 +➜ texture 3d : https://huggingface.co/collections/strangerzonehf/flux-3dxl-engine-674833c14a001d5b1fdb5139 +➜ super 3d : https://huggingface.co/collections/strangerzonehf/super-3d-engine-6743231d69f496df97addd2b +➜ style mix : https://huggingface.co/collections/strangerzonehf/mixer-engine-673582c9c5939d8aa5bf9533 +➜ realism : https://huggingface.co/collections/strangerzonehf/realism-engine-67343495b6daf0fbdb904cc1 + +╰┈➤The Entire Collection : +➜ flux.1 : https://huggingface.co/collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be +➜ flux-ultimate-lora-collection : https://huggingface.co/strangerzonehf/Flux-Ultimate-LoRA-Collection +➜ sd 3.5 large / turbo : https://huggingface.co/collections/prithivMLmods/sd-35-large-lora-671b39d7bc2e7f71a446b163 +➜ sdxl : https://huggingface.co/collections/prithivMLmods/sdxl-dev-models-667803a6d5ac75b59110e527 + +╰┈➤Pages : +➜ page 1: https://huggingface.co/strangerzonehf +➜ page 2: https://huggingface.co/prithivMLmods +➜ demo : https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC + +.🤗","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/MV0AtWxGXO93s2hKPrKEg.png'}]",[],"[{'reaction': '🔥', 'users': ['C50BARZ', 'John6666', 'SanjuDripinik', 'Chroma111', 'YaTharThShaRma999', 'tazztone', 'Tonic', 'skorr', 'prithivMLmods'], 'count': 9}, {'reaction': '🚀', 'users': ['prithivMLmods'], 'count': 1}]",2025-04-17 22:59:40,2025-04-17 23:21:03.792,[],/posts/prithivMLmods/567717355691306,2918,"{'language': 'en', 'probability': 0.5163012146949768}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,401294184812659,"[{'type': 'text', 'value': 'FramePack Full Tutorial: 1-Click to Install on Windows - Up to 120 Second Image-to-Videos 
with 6GB > ', 'raw': 'FramePack Full Tutorial: 1-Click to Install on Windows - Up to 120 Second Image-to-Videos with 6GB > '}, {'type': 'link', 'href': 'https://youtu.be/HwMngohRmHg', 'raw': 'https://youtu.be/HwMngohRmHg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tutorial video : ', 'raw': 'Tutorial video : '}, {'type': 'link', 'href': 'https://youtu.be/HwMngohRmHg', 'raw': 'https://youtu.be/HwMngohRmHg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A full Windows local tutorial for FramePack from legendary lllyasviel, with a very advanced Gradio app to generate consistent videos from images as long as 120 seconds on GPUs with as little as 6 GB. This tutorial will show you step by step how to install and use FramePack locally with a very advanced Gradio app. Moreover, I have published installers for cloud services such as RunPod and Massed Compute for those who are GPU poor and those who want to scale.', 'raw': 'A full Windows local tutorial for FramePack from legendary lllyasviel, with a very advanced Gradio app to generate consistent videos from images as long as 120 seconds on GPUs with as little as 6 GB. This tutorial will show you step by step how to install and use FramePack locally with a very advanced Gradio app. Moreover, I have published installers for cloud services such as RunPod and Massed Compute for those who are GPU poor and those who want to scale.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Full Instructions, Installers and Links Shared Post (the one used in the tutorial) ⤵️', 'raw': '🔗 Full Instructions, Installers and Links Shared Post (the one used in the tutorial) ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/click-to-open-post-used-in-tutorial-126855226', 'raw': 'https://www.patreon.com/posts/click-to-open-post-used-in-tutorial-126855226'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 SECourses Official Discord 10500+ Members ⤵️', 'raw': '🔗 SECourses Official Discord 10500+ Members ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388', 'raw': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️', 'raw': '🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://github.com/FurkanGozukara/Stable-Diffusion', 'raw': 'https://github.com/FurkanGozukara/Stable-Diffusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 SECourses Official Reddit - Stay Subscribed To Learn All The News and More ⤵️', 'raw': '🔗 SECourses Official Reddit - Stay Subscribed To Learn All The News and More ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.reddit.com/r/SECourses/', 'raw': 'https://www.reddit.com/r/SECourses/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 
'🔗 MSI RTX 5090 TRIO FurMark Benchmarking + Overclocking + Noise Testing and Comparing with RTX 3090 TI ⤵️', 'raw': '🔗 MSI RTX 5090 TRIO FurMark Benchmarking + Overclocking + Noise Testing and Comparing with RTX 3090 TI ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://youtu.be/uV3oqdILOmA', 'raw': 'https://youtu.be/uV3oqdILOmA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 RTX 5090 Tested Against FLUX DEV, SD 3.5 Large, SD 3.5 Medium, SDXL, SD 1.5, AMD 9950X + RTX 3090 TI ⤵️', 'raw': '🔗 RTX 5090 Tested Against FLUX DEV, SD 3.5 Large, SD 3.5 Medium, SDXL, SD 1.5, AMD 9950X + RTX 3090 TI ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://youtu.be/jHlGzaDLkto', 'raw': 'https://youtu.be/jHlGzaDLkto'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Packing Input Frame Context in Next-Frame Prediction Models for Video Generation', 'raw': 'Packing Input Frame Context in Next-Frame Prediction Models for Video Generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'FramePack is a method to train next-frame (or next-frame-section) prediction models for video generation. FramePack compresses input frames to make the transformer context length a fixed number regardless of the video length.', 'raw': 'FramePack is a method to train next-frame (or next-frame-section) prediction models for video generation. FramePack compresses input frames to make the transformer context length a fixed number regardless of the video length.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper : ', 'raw': 'Paper : '}, {'type': 'link', 'href': 'https://lllyasviel.github.io/frame_pack_gitpage/pack.pdf', 'raw': 'https://lllyasviel.github.io/frame_pack_gitpage/pack.pdf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Project Page : ', 'raw': 'Project Page : '}, {'type': 'link', 'href': 'https://github.com/lllyasviel/FramePack', 'raw': 'https://github.com/lllyasviel/FramePack'}]","FramePack Full Tutorial: 1-Click to Install on Windows - Up to 120 Second Image-to-Videos with 6GB > https://youtu.be/HwMngohRmHg + +Tutorial video : https://youtu.be/HwMngohRmHg + +FramePack from legendary lllyasviel full Windows local tutorial with a very advanced Gradio app to generate consistent videos from images, up to 120 seconds long, on GPUs with as little as 6 GB. This tutorial will show you step by step how to install and use FramePack locally with a very advanced Gradio app. Moreover, I have published installers for cloud services such as RunPod and Massed Compute for those who are GPU poor and who want to scale.
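The frame-context packing idea described just above is easy to picture in code. Below is a toy Python sketch of it: the most recent frames keep full token resolution while older frames are pooled progressively harder, so the packed context stays bounded no matter how long the video grows. All shapes and the geometric compression schedule here are illustrative assumptions, not the paper's actual patchify kernels.

```python
import torch

def pack_frame_context(frames, base_tokens=1536):
    """Toy illustration of FramePack-style context packing.

    frames: per-frame feature tensors, oldest first, each (tokens, dim).
    Recent frames keep full resolution; older frames get pooled ever more
    aggressively, so total context length stays roughly constant.
    """
    packed = []
    for age, feats in enumerate(reversed(frames)):  # age 0 = most recent
        factor = 2 ** age                 # compression grows with frame age
        tokens = max(1, base_tokens // factor)
        pooled = torch.nn.functional.adaptive_avg_pool1d(
            feats.T.unsqueeze(0), tokens  # stand-in for patchify kernels
        ).squeeze(0).T
        packed.append(pooled)
    return torch.cat(packed, dim=0)

frames = [torch.randn(1536, 64) for _ in range(8)]
# Geometric series: context stays under 2 * base_tokens however many frames.
print(pack_frame_context(frames).shape)  # torch.Size([3060, 64])
```

Because the compression factors form a geometric series, the packed context never exceeds roughly twice the token budget of a single full-resolution frame.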
+ +🔗 Full Instructions, Installers and Links Shared Post (the one used in the tutorial) ⤵️ +▶️ https://www.patreon.com/posts/click-to-open-post-used-in-tutorial-126855226 + +🔗 SECourses Official Discord 10500+ Members ⤵️ +▶️ https://discord.com/servers/software-engineering-courses-secourses-772774097734074388 + +🔗 Stable Diffusion, FLUX, Generative AI Tutorials and Resources GitHub ⤵️ +▶️ https://github.com/FurkanGozukara/Stable-Diffusion + +🔗 SECourses Official Reddit - Stay Subscribed To Learn All The News and More ⤵️ +▶️ https://www.reddit.com/r/SECourses/ + +🔗 MSI RTX 5090 TRIO FurMark Benchmarking + Overclocking + Noise Testing and Comparing with RTX 3090 TI ⤵️ +▶️ https://youtu.be/uV3oqdILOmA + +🔗 RTX 5090 Tested Against FLUX DEV, SD 3.5 Large, SD 3.5 Medium, SDXL, SD 1.5, AMD 9950X + RTX 3090 TI ⤵️ +▶️ https://youtu.be/jHlGzaDLkto + +Packing Input Frame Context in Next-Frame Prediction Models for Video Generation +FramePack is a method to train next-frame (or next-frame-section) prediction models for video generation. FramePack compresses input frames to make the transformer context length a fixed number regardless of the video length. + +Paper : https://lllyasviel.github.io/frame_pack_gitpage/pack.pdf + +Project Page : https://github.com/lllyasviel/FramePack","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/fqaqQHaxecYn9jR0D0DdA.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/ZcEL-dbzXWR3c6W1EYxV0.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Weqv0g8HlyZaG1bdY_2cu.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/-pxI4CBOsfNVpgjJL-mmQ.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/icbZIEa1RzWT2S1BVAI0U.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Tm86oosXqNDx_vgzxDbwe.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/gtyX6pd1ecai5mOb0_alo.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/6UV-bcFMOnfRnrnFd2o2Z.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/OZjgpJA1qEljiyfkt7k_0.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/N0M7caGUW9NIukps0jDxH.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/LNgDqodFFJa8U3E3M5Oi-.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/rL2RWJbEvZHbTupxkgvu9.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/wqwYWi1nqRhuwHl7oqqEe.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/nmjFoDqiYDfA0Qqd3qLiP.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/K-RIlGAAH_P_0REQ2-d2M.gif'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'BuiDoan', 'AekDevDev', 'sn2234', 'cahlen'], 'count': 5}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666', 'ChuckMcSneed'], 'count': 3}, {'reaction': '🤗', 'users': ['MonsterMMORPG', 'robb-0'], 'count': 
2}, {'reaction': '🚀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '❤️', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-04-17 22:41:16,2025-04-17 22:41:16.651,[],/posts/MonsterMMORPG/401294184812659,2792,"{'language': 'en', 'probability': 0.7506567239761353}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4VOzArmrRaX_DUTxGmm59.jpeg,59.0,Charles McSneed,ChuckMcSneed,490753893228131,"[{'type': 'text', 'value': 'Okay, folks, I need some help with this darn internet thing! My son, Timmy, showed me this interesting… forum thingy. He called it ""/lmg/"" and said it was the place to talk about… well, let\'s just say important matters 😉.', 'raw': 'Okay, folks, I need some help with this darn internet thing! My son, Timmy, showed me this interesting… forum thingy. He called it ""/lmg/"" and said it was the place to talk about… well, let\'s just say important matters 😉.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Timmy says something happened, though! He keeps mumbling about ""Soy Jacks,"" ""4chan is dead"" and ""hacked servers.""', 'raw': 'Timmy says something happened, though! He keeps mumbling about ""Soy Jacks,"" ""4chan is dead"" and ""hacked servers.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So, is this ""/lmg/"" thing GONE forever? Or did it move somewhere else? Timmy isn\'t being very helpful, and I\'m sure some of you bright young minds on here probably know! I want to learn more and I really liked it there!', 'raw': 'So, is this ""/lmg/"" thing GONE forever? Or did it move somewhere else? Timmy isn\'t being very helpful, and I\'m sure some of you bright young minds on here probably know! I want to learn more and I really liked it there!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks in advance for any help!', 'raw': 'Thanks in advance for any help!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '---', 'raw': '---'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'God bless America 🇺🇸', 'raw': 'God bless America 🇺🇸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#WWG1WGA', 'raw': '#WWG1WGA'}]","Okay, folks, I need some help with this darn internet thing! My son, Timmy, showed me this interesting… forum thingy. He called it ""/lmg/"" and said it was the place to talk about… well, let's just say important matters 😉. + +Timmy says something happened, though! He keeps mumbling about ""Soy Jacks,"" ""4chan is dead"" and ""hacked servers."" + +So, is this ""/lmg/"" thing GONE forever? Or did it move somewhere else? Timmy isn't being very helpful, and I'm sure some of you bright young minds on here probably know! I want to learn more and I really liked it there! + +Thanks in advance for any help! 
+ +--- + +God bless America 🇺🇸 +#WWG1WGA","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65644e982bdaccfcd536aff1/tu5B8GOzml0ZVvy-ge_bx.png'}]",[],"[{'reaction': '❤️', 'users': ['quasar-of-mikus', '9voltfan2009'], 'count': 2}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-04-17 22:06:21,2025-04-19 19:15:41.250,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '65644e982bdaccfcd536aff1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4VOzArmrRaX_DUTxGmm59.jpeg', 'fullname': 'Charles McSneed', 'name': 'ChuckMcSneed', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 59, 'isFollowing': False}, {'_id': '65b7c980cdaebfe1349b1aa5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65b7c980cdaebfe1349b1aa5/tQ4DfLEQe2qqplrwKH1Vu.png', 'fullname': 'quasar-of-mikus', 'name': 'quasar-of-mikus', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9, 'isFollowing': False}]",/posts/ChuckMcSneed/490753893228131,748,"{'language': 'en', 'probability': 0.9766240119934082}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627293523680-607fdcaa7c746d01ecb1917e.png,31.0,Eugene Siow,eugenesiow,891114817044097,"[{'type': 'text', 'value': 'GPT-4.1 dropped this week - and it puts OpenAI back in the race for coding & agentic leadership.', 'raw': 'GPT-4.1 dropped this week - and it puts OpenAI back in the race for coding & agentic leadership.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙️ API only - no ChatGPT toggle for this.', 'raw': '⚙️ API only - no ChatGPT toggle for this.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻 Coding performance is back on par with Claude 3.7 Sonnet & Gemini 2.5 Pro (though Gemini still leads).', 'raw': '💻 Coding performance is back on par with Claude 3.7 Sonnet & Gemini 2.5 Pro (though Gemini still leads).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💸 Pricing:', 'raw': '💸 Pricing:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Full: $3.50 / 1M tokens', 'raw': '• Full: $3.50 / 1M tokens'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Mini: $0.70 / 1M', 'raw': '• Mini: $0.70 / 1M'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Nano: $0.17 / 1M', 'raw': '• Nano: $0.17 / 1M'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Gemini 2.5 Pro = best price/perf ($3.44 / 1M)', 'raw': '👉 Gemini 2.5 Pro = best price/perf ($3.44 / 1M)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '😵 Claude 3.5 Sonnet = $6 / 1M (!)', 'raw': '😵 Claude 3.5 Sonnet = $6 / 1M (!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Not a ""thinking"" model.', 'raw': '🧠 Not a ""thinking"" model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Mini shines on general reasoning tasks (e.g. GPQA), but only the full model holds up in SWE-bench-verified (GitHub issue solving).', 'raw': '📊 Mini shines on general reasoning tasks (e.g. 
GPQA), but only the full model holds up in SWE-bench-verified (GitHub issue solving).'}]","GPT-4.1 dropped this week - and it puts OpenAI back in the race for coding & agentic leadership. + +⚙️ API only - no ChatGPT toggle for this. +💻 Coding performance is back on par with Claude 3.7 Sonnet & Gemini 2.5 Pro (though Gemini still leads). +💸 Pricing: +• Full: $3.50 / 1M tokens +• Mini: $0.70 / 1M +• Nano: $0.17 / 1M +👉 Gemini 2.5 Pro = best price/perf ($3.44 / 1M) +😵 Claude 3.5 Sonnet = $6 / 1M (!) + +🧠 Not a ""thinking"" model. +📊 Mini shines on general reasoning tasks (e.g. GPQA), but only the full model holds up in SWE-bench-verified (GitHub issue solving).","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/607fdcaa7c746d01ecb1917e/a8b-3nZMi0rDa5MYc7Dhs.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/607fdcaa7c746d01ecb1917e/i3pxhdTc9EiYYKmsGjTbE.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'Avvert', 'andreivk', 'Ayedunno', 'wsuff', 'p3pp01'], 'count': 6}]",2025-04-17 21:21:14,2025-04-17 21:21:14.715,[],/posts/eugenesiow/891114817044097,1605,"{'language': 'en', 'probability': 0.7215592861175537}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f805de9c81260ff8881ee/WXfMTAlKuHAi4M1fySCJ8.jpeg,178.0,ℏεsam,hesamation,750913380201236,"[{'type': 'text', 'value': 'OpenAI just released a 34-page practical guide to building agents, ', 'raw': 'OpenAI just released a 34-page practical guide to building agents, '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's 10 things it teaches us:"", 'raw': ""Here's 10 things it teaches us:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1➜ agents are different from workflows: they are complete autonomous systems that perform tasks on your behalf. many applications use LLMs for workflows, but this is not an agent.', 'raw': '1➜ agents are different from workflows: they are complete autonomous systems that perform tasks on your behalf. many applications use LLMs for workflows, but this is not an agent.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2➜ use them for tricky stuff: complex decision making, dynamic rules, unstructured data', 'raw': '2➜ use them for tricky stuff: complex decision making, dynamic rules, unstructured data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3➜ core recipe: each agent has three main components: Model (the brain), Tools, Instructions on how to behave', 'raw': '3➜ core recipe: each agent has three main components: Model (the brain), Tools, Instructions on how to behave'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""4➜ choose the right brain: set up evals to get a baseline performance, use a smart model to see what's possible, gradually downgrade the model for cost and speed"", 'raw': ""4➜ choose the right brain: set up evals to get a baseline performance, use a smart model to see what's possible, gradually downgrade the model for cost and speed""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5➜ tools are key: choose well-defined and tested tools. an agent needs tools to retrieve data and context, and take actions.', 'raw': '5➜ tools are key: choose well-defined and tested tools. 
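A quick worked example of the price/perf numbers quoted just above, using only the blended $/1M-token rates from the post. The monthly volume is a made-up figure, and real bills depend on the input/output token mix that these blended rates hide.

```python
# Blended $/1M-token rates as quoted in the post above.
rates = {
    "gpt-4.1": 3.50,
    "gpt-4.1-mini": 0.70,
    "gpt-4.1-nano": 0.17,
    "gemini-2.5-pro": 3.44,
    "claude-3.5-sonnet": 6.00,
}

tokens = 25_000_000  # hypothetical monthly volume

for model, per_million in sorted(rates.items(), key=lambda kv: kv[1]):
    print(f"{model:>18}: ${tokens / 1_000_000 * per_million:,.2f}/month")
```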
an agent needs tools to retrieve data and context, and take actions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6➜ instructions matter A LOT: be super clear telling the agent its goals, steps, and rules. Vague instructions = unpredictable agent. Be explicit.', 'raw': '6➜ instructions matter A LOT: be super clear telling the agent its goals, steps, and rules. Vague instructions = unpredictable agent. Be explicit.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""7➜ start simple, then scale: often a single agent with several tools is ok. don't jump to complex multi-agent systems immediately."", 'raw': ""7➜ start simple, then scale: often a single agent with several tools is ok. don't jump to complex multi-agent systems immediately.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '8➜ if you use multi-agents: you can have a ""manager"" agent directing traffic to specialist agents, or have agents hand off tasks to each other.', 'raw': '8➜ if you use multi-agents: you can have a ""manager"" agent directing traffic to specialist agents, or have agents hand off tasks to each other.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""9➜ guardrails are a MUST: check user input for weird stuff, make sure the agent isn't about to do something risky, filter out private info, block harmful content. Don't let it run wild."", 'raw': ""9➜ guardrails are a MUST: check user input for weird stuff, make sure the agent isn't about to do something risky, filter out private info, block harmful content. Don't let it run wild.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '10➜ build and plan for humans: start small, test, improve. always have a plan for when the agent gets stuck or is about to do something high-risk.', 'raw': '10➜ build and plan for humans: start small, test, improve. always have a plan for when the agent gets stuck or is about to do something high-risk.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Download: ', 'raw': 'Download: '}, {'type': 'link', 'href': 'https://t.co/fJaCkgf7ph', 'raw': 'https://t.co/fJaCkgf7ph'}]","OpenAI just released a 34-page practical guide to building agents, + +Here's 10 things it teaches us: + +1➜ agents are different from workflows: they are complete autonomous systems that perform tasks on your behalf. many applications use LLMs for workflows, but this is not an agent. + +2➜ use them for tricky stuff: complex decision making, dynamic rules, unstructured data + +3➜ core recipe: each agent has three main components: Model (the brain), Tools, Instructions on how to behave + +4➜ choose the right brain: set up evals to get a baseline performance, use a smart model to see what's possible, gradually downgrade the model for cost and speed + +5➜ tools are key: choose well-defined and tested tools. an agent needs tools to retrieve data and context, and take actions. + +6➜ instructions matter A LOT: be super clear telling the agent its goals, steps, and rules. Vague instructions = unpredictable agent. Be explicit. + +7➜ start simple, then scale: often a single agent with several tools is ok. don't jump to complex multi-agent systems immediately. 
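Before the list picks up again at point 8, here is the point-3 recipe (Model, Tools, Instructions) as a minimal loop. Everything in this sketch is hypothetical: call_llm is a stub standing in for whatever model API you use, and get_weather is an invented example tool, not anything from OpenAI's guide.

```python
import json

def get_weather(city: str) -> str:
    """An invented example tool; any well-defined, tested function works."""
    return f"Sunny in {city}"

TOOLS = {"get_weather": get_weather}  # the Tools component

INSTRUCTIONS = (  # the Instructions component: explicit goals, steps, rules
    "You are a travel assistant. Call get_weather(city) for weather "
    "questions; otherwise answer directly."
)

def call_llm(instructions: str, transcript: list) -> str:
    """Hypothetical stub for the Model component; swap in a real LLM API."""
    if any(m["role"] == "tool" for m in transcript):
        return json.dumps({"answer": transcript[-1]["content"]})
    return json.dumps({"tool": "get_weather", "args": {"city": "Paris"}})

def run_agent(user_msg: str, max_steps: int = 5) -> str:
    transcript = [{"role": "user", "content": user_msg}]
    for _ in range(max_steps):
        move = json.loads(call_llm(INSTRUCTIONS, transcript))
        if "answer" in move:
            return move["answer"]
        result = TOOLS[move["tool"]](**move["args"])  # take an action
        transcript.append({"role": "tool", "content": result})
    return "Step limit hit: stop and hand off to a human."

print(run_agent("What's the weather in Paris?"))  # -> Sunny in Paris
```

The step cap is point 10 of the list in miniature: when the agent stalls, stop and hand off to a human.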
+ +8➜ if you use multi-agents: you can have a ""manager"" agent directing traffic to specialist agents, or have agents hand off tasks to each other. + +9➜ guardrails are a MUST: check user input for weird stuff, make sure the agent isn't about to do something risky, filter out private info, block harmful content. Don't let it run wild. + +10➜ build and plan for humans: start small, test, improve. always have a plan for when the agent gets stuck or is about to do something high-risk. + +Download: https://t.co/fJaCkgf7ph","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f805de9c81260ff8881ee/e2TT1ckmS-YS8y6Aco0Wn.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f805de9c81260ff8881ee/MY7YHxckUucsNPGedsZpo.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f805de9c81260ff8881ee/dtcyWfo3wDmjnKWvrutDE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f805de9c81260ff8881ee/4KyB1wcAXQwbLIxIRL4IM.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f805de9c81260ff8881ee/tF1Q6cFy3yYYncVspT_NY.png'}]",[],"[{'reaction': '❤️', 'users': ['hesamation', 'berito', 'joamag'], 'count': 3}, {'reaction': '🔥', 'users': ['BoneMechanic', 'salmankhanpm'], 'count': 2}, {'reaction': '👍', 'users': ['eugenesiow', 'okamirvs'], 'count': 2}, {'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-04-17 20:29:12,2025-04-18 07:52:35.835,"[{'_id': '6491b217e4908018967a17e9', 'avatarUrl': '/avatars/aa18bae34d7fb5d7533b90cc60187cd1.svg', 'fullname': 'Rakshith G', 'name': 'BoneMechanic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '647f805de9c81260ff8881ee', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/647f805de9c81260ff8881ee/WXfMTAlKuHAi4M1fySCJ8.jpeg', 'fullname': 'ℏεsam', 'name': 'hesamation', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 178, 'isFollowing': False}, {'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/hesamation/750913380201236,2203,"{'language': 'en', 'probability': 0.9190253615379333}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,250892735426086,"[{'type': 'text', 'value': 'o4-mini beats o3-mini, and gets very close to SOTA 😄', 'raw': 'o4-mini beats o3-mini, and gets very close to SOTA 😄'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'onekq-ai/WebApp1K-models-leaderboard'}, 'url': 'https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard', 'raw': 'https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard'}]","o4-mini beats o3-mini, and gets very close to SOTA 😄 + +https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-04-17 18:08:15,2025-04-17 20:01:26.714,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '669dbd709a4bf63e08f1ddc2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png', 'fullname': 'Yi Cui', 'name': 'onekq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}]",/posts/onekq/250892735426086,484,"{'language': 'en', 'probability': 0.6703316569328308}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,926684469376880,"[{'type': 'text', 'value': 'Wan2.1-FLF2V🎥 a 14B start-end frame video generation model just released by Alibaba_Wan🔥', 'raw': 'Wan2.1-FLF2V🎥 a 14B start-end frame video generation model just released by Alibaba_Wan🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Wan-AI/Wan2.1-FLF2V-14B-720P'}, 'url': 'https://huggingface.co/Wan-AI/Wan2.1-FLF2V-14B-720P#run-first-last-frame-to-video-generation', 'raw': 'https://huggingface.co/Wan-AI/Wan2.1-FLF2V-14B-720P#run-first-last-frame-to-video-generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Give it two images (start & end), it generates a smooth, high-quality video in between.', 'raw': '✨ Give it two images (start & end), it generates a smooth, high-quality video in between.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Apache 2.0 licensed ', 'raw': '✨ Apache 2.0 licensed '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Built on DiT + Flow Matching', 'raw': '✨ Built on DiT + Flow Matching'}]","Wan2.1-FLF2V🎥 a 14B start-end frame video generation model just released by Alibaba_Wan🔥 + +https://huggingface.co/Wan-AI/Wan2.1-FLF2V-14B-720P#run-first-last-frame-to-video-generation + +✨ Give it two images (start & end), it generates a smooth, high-quality video in between. 
+✨ Apache 2.0 licensed +✨ Built on DiT + Flow Matching","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/Fahtf-k3guyyJwlFo2yhd.mp4'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'jasonchenjy', 'C50BARZ', 'GeneralGost', 'phihoang98'], 'count': 5}, {'reaction': '🤗', 'users': ['JLouisBiz'], 'count': 1}, {'reaction': '❤️', 'users': ['rsshekhawat'], 'count': 1}]",2025-04-17 16:08:46,2025-04-17 18:56:33.137,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/AdinaY/926684469376880,2157,"{'language': 'en', 'probability': 0.7870509624481201}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YdisMmTuCDj0gVK2TDml7.jpeg,3.0,EricSondhi,sondhiArm,544279735060969,"[{'type': 'text', 'value': 'At Arm, we’re trying something a bit different - a new series of live code-alongs and Q&A sessions led by our engineers to support developers building, optimizing, and deploying cloud-native applications.', 'raw': 'At Arm, we’re trying something a bit different - a new series of live code-alongs and Q&A sessions led by our engineers to support developers building, optimizing, and deploying cloud-native applications.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'link', 'href': 'https://www.arm.com/resources/webinar/code-along-arm-cloud-migration', 'raw': 'https://www.arm.com/resources/webinar/code-along-arm-cloud-migration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'There are four live code-alongs, each followed by a “Connect with the Experts” session one week later.', 'raw': 'There are four live code-alongs, each followed by a “Connect with the Experts” session one week later.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The first two sessions focus on using Hugging Face with Arm:', 'raw': 'The first two sessions focus on using Hugging Face with Arm:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tApr 22: Build a RAG app with vector search and LLMs, optimized for Arm', 'raw': '\t•\tApr 22: Build a RAG app with vector search and LLMs, optimized for Arm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tApr 30: Run LLaMA with PyTorch on Arm-based infrastructure', 'raw': '\t•\tApr 30: Run LLaMA with PyTorch on Arm-based infrastructure'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you're interested in the topics, you can sign up for one or more sessions. Each session includes time to ask questions directly to the Arm team."", 'raw': ""If you're interested in the topics, you can sign up for one or more sessions. Each session includes time to ask questions directly to the Arm team.""}]","At Arm, we’re trying something a bit different - a new series of live code-alongs and Q&A sessions led by our engineers to support developers building, optimizing, and deploying cloud-native applications. 
+ + https://www.arm.com/resources/webinar/code-along-arm-cloud-migration + +There are four live code-alongs, each followed by a “Connect with the Experts” session one week later. + +The first two sessions focus on using Hugging Face with Arm: + • Apr 22: Build a RAG app with vector search and LLMs, optimized for Arm + • Apr 30: Run LLaMA with PyTorch on Arm-based infrastructure + +If you're interested in the topics, you can sign up for one or more sessions. Each session includes time to ask questions directly to the Arm team.",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'Cloudy-Boom'], 'count': 2}]",2025-04-17 16:00:36,2025-04-17 16:23:57.114,[],/posts/sondhiArm/544279735060969,1102,"{'language': 'en', 'probability': 0.9168171286582947}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,225696597302421,"[{'type': 'text', 'value': '🔥 New reasoning models from the Chinese community, by Skywork 天工-昆仑万维', 'raw': '🔥 New reasoning models from the Chinese community, by Skywork 天工-昆仑万维'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Skywork/skywork-or1-67fa1bcb41b436ef2def76b9'}, 'url': 'https://huggingface.co/collections/Skywork/skywork-or1-67fa1bcb41b436ef2def76b9', 'raw': 'https://huggingface.co/collections/Skywork/skywork-or1-67fa1bcb41b436ef2def76b9'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Skywork OR1-Math-7B > Optimized for math reasoning', 'raw': '✨Skywork OR1-Math-7B > Optimized for math reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Skywork-OR1-7B-preview > Excels in math & coding', 'raw': '✨Skywork-OR1-7B-preview > Excels in math & coding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Skywork-OR1-32B-preview > Matches Deepseek-R1 on math (AIME24/25) and coding (LiveCodeBench)', 'raw': '✨Skywork-OR1-32B-preview > Matches Deepseek-R1 on math (AIME24/25) and coding (LiveCodeBench)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Released under the Apache 2.0 license 🥳', 'raw': 'Released under the Apache 2.0 license 🥳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Final version coming in 2 weeks!', 'raw': 'Final version coming in 2 weeks!'}, {'type': 'new_line', 'raw': '\n'}]","🔥 New reasoning models from the Chinese community, by Skywork 天工-昆仑万维 + +https://huggingface.co/collections/Skywork/skywork-or1-67fa1bcb41b436ef2def76b9 + +✨Skywork OR1-Math-7B > Optimized for math reasoning +✨Skywork-OR1-7B-preview > Excels in math & coding +✨Skywork-OR1-32B-preview > Matches Deepseek-R1 on math (AIME24/25) and coding (LiveCodeBench) + +Released under the Apache 2.0 license 🥳 +Final version coming in 2 weeks! 
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/LvVv0D9jju57lr7wGO96J.png'}]",[],"[{'reaction': '❤️', 'users': ['zzffss', 'Makar7', 'John6666', 'Kazexu', 'theainerd', 'chrisliu298', 'victor', 'Takugen'], 'count': 8}]",2025-04-14 10:14:24,2025-04-14 10:14:53.792,[],/posts/AdinaY/225696597302421,3304,"{'language': 'en', 'probability': 0.7041080594062805}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png,65.0,Jean Louis,JLouisBiz,992335749959714,"[{'type': 'text', 'value': 'If you are using llama.CPP then From time to time you may have a need to quickly review your HTML output. And there is no automatic way to do it in its native web UI. This small shell script can help you integrate with your copy function. Just press on copy and invoke the shell script. You can make a small icon to invoke the shell script or bind it to the key or mouse button.', 'raw': 'If you are using llama.CPP then From time to time you may have a need to quickly review your HTML output. And there is no automatic way to do it in its native web UI. This small shell script can help you integrate with your copy function. Just press on copy and invoke the shell script. You can make a small icon to invoke the shell script or bind it to the key or mouse button.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Shell script is here:', 'raw': 'Shell script is here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://gitea.com/gnusupport/LLM-Helpers/src/branch/main/bin/clipboard-to-firefox.sh', 'raw': 'https://gitea.com/gnusupport/LLM-Helpers/src/branch/main/bin/clipboard-to-firefox.sh'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And video demonstration is here: ', 'raw': 'And video demonstration is here: '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=WCu3TazXpgg', 'raw': 'https://www.youtube.com/watch?v=WCu3TazXpgg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Join my Discord for LLM integration: ', 'raw': 'Join my Discord for LLM integration: '}, {'type': 'link', 'href': 'https://discord.gg/N2BRPZ2jKb', 'raw': 'https://discord.gg/N2BRPZ2jKb'}, {'type': 'new_line', 'raw': '\n'}]","If you are using llama.CPP then From time to time you may have a need to quickly review your HTML output. And there is no automatic way to do it in its native web UI. This small shell script can help you integrate with your copy function. Just press on copy and invoke the shell script. You can make a small icon to invoke the shell script or bind it to the key or mouse button. 
+ +Shell script is here: + +https://gitea.com/gnusupport/LLM-Helpers/src/branch/main/bin/clipboard-to-firefox.sh + +And video demonstration is here: https://www.youtube.com/watch?v=WCu3TazXpgg + +Join my Discord for LLM integration: https://discord.gg/N2BRPZ2jKb +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6758a9850e3fff481964ca6d/E600IYs4xrwD-OFqDyBj3.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '🔥', 'users': ['Takugen'], 'count': 1}]",2025-04-14 09:23:39,2025-04-14 09:23:39.156,[],/posts/JLouisBiz/992335749959714,536,"{'language': 'en', 'probability': 0.7520124912261963}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63de560a15266dd945f209ca/PeZf3IF-x7Qh8OcnKH12R.png,95.0,MrDragonFox,MrDragonFox,123791005245279,"[{'type': 'text', 'value': 'yet another audio dataset pre-classified for events + audio aesthetics', 'raw': 'yet another audio dataset pre-classified for events + audio aesthetics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'this time for German - 680h sampled from Emilia YODAS ', 'raw': 'this time for German - 680h sampled from Emilia YODAS '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'timestamps for ASR training or other fancier things available as NC in the raw repo ', 'raw': 'timestamps for ASR training or other fancier things available as NC in the raw repo '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'MrDragonFox/DE_Emilia_Yodas_680h'}, 'url': 'https://huggingface.co/datasets/MrDragonFox/DE_Emilia_Yodas_680h', 'raw': 'https://huggingface.co/datasets/MrDragonFox/DE_Emilia_Yodas_680h'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'CC BY 4.0, as with Emilia YODAS ', 'raw': 'CC BY 4.0, as with Emilia YODAS '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'raw events / transcriptions are CC BY-NC 4.0', 'raw': 'raw events / transcriptions are CC BY-NC 4.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'MrDragonFox/DE_Emilia_Yodas_680h_raw_timestamps'}, 'url': 'https://huggingface.co/datasets/MrDragonFox/DE_Emilia_Yodas_680h_raw_timestamps', 'raw': 'https://huggingface.co/datasets/MrDragonFox/DE_Emilia_Yodas_680h_raw_timestamps'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'in the coming days I should push about 600h of English + some Japanese too, same format ', 'raw': 'in the coming days I should push about 600h of English + some Japanese too, same format '}]","yet another audio dataset pre-classified for events + audio aesthetics + +this time for German - 680h sampled from Emilia YODAS + +timestamps for ASR training or other fancier things available as NC in the raw repo + +https://huggingface.co/datasets/MrDragonFox/DE_Emilia_Yodas_680h + +CC BY 4.0, as with Emilia YODAS + +raw events / transcriptions are CC BY-NC 4.0 + +https://huggingface.co/datasets/MrDragonFox/DE_Emilia_Yodas_680h_raw_timestamps + +in the coming days I should push about 600h of English + some Japanese too, same format ",[],[],"[{'reaction': '👍', 'users': ['John6666', 'Etherll', 'setianke', 'Takugen', 'rajivlmnt'], 
'count': 5}, {'reaction': '🔥', 'users': ['kipjay', 'Etherll', 'rajivlmnt'], 'count': 3}]",2025-04-14 08:22:11,2025-04-14 08:23:31.626,[],/posts/MrDragonFox/123791005245279,4167,"{'language': 'en', 'probability': 0.8251487612724304}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/z82tUCF_X18mMaP7m0NCQ.png,924.0,openfree,openfree,305569626054328,"[{'type': 'text', 'value': 'Agentic AI Era: Analyzing MCP vs MCO 🚀', 'raw': 'Agentic AI Era: Analyzing MCP vs MCO 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hello everyone!', 'raw': 'Hello everyone!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With the rapid advancement of AI agent technology, two architectures have come into the spotlight: MCP (Model Context Protocol) and MCO (Model Context Open-json). Today, we’ll introduce the key features and differences of these two approaches.', 'raw': 'With the rapid advancement of AI agent technology, two architectures have come into the spotlight: MCP (Model Context Protocol) and MCO (Model Context Open-json). Today, we’ll introduce the key features and differences of these two approaches.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'VIDraft/Agentic-AI-CHAT'}, 'url': 'https://huggingface.co/spaces/VIDraft/Agentic-AI-CHAT', 'raw': 'https://huggingface.co/spaces/VIDraft/Agentic-AI-CHAT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MCP: The Traditional Approach 🏛️', 'raw': 'MCP: The Traditional Approach 🏛️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Centralized Function Registry: All functions are hardcoded into the core system.', 'raw': 'Centralized Function Registry: All functions are hardcoded into the core system.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Static Function Definitions & Tight Coupling: New features require changes to the core application code, limiting scalability.', 'raw': 'Static Function Definitions & Tight Coupling: New features require changes to the core application code, limiting scalability.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Monolithic Design: Complex deployment and version management can cause a single error to affect the whole system.', 'raw': 'Monolithic Design: Complex deployment and version management can cause a single error to affect the whole system.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code Example:', 'raw': 'Code Example:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""'''py"", 'raw': ""'''py""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'FUNCTION_REGISTRY = {', 'raw': 'FUNCTION_REGISTRY = {'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ""existing_function"": existing_function,', 'raw': ' ""existing_function"": existing_function,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ""new_function"": new_function # Adding a new function', 'raw': ' ""new_function"": new_function # Adding a new function'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '}', 'raw': '}'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""'''"", 'raw': ""'''""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'text', 'value': 'MCO: A Revolutionary Approach 🆕', 'raw': 'MCO: A Revolutionary Approach 🆕'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'JSON-based Function Definitions: Function details are stored in external JSON files, enabling dynamic module loading.', 'raw': 'JSON-based Function Definitions: Function details are stored in external JSON files, enabling dynamic module loading.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Loose Coupling & Microservices: Each function can be developed, tested, and deployed as an independent module.', 'raw': 'Loose Coupling & Microservices: Each function can be developed, tested, and deployed as an independent module.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Flexible Scalability: Add new features by simply updating the JSON and module files, without modifying the core system.', 'raw': 'Flexible Scalability: Add new features by simply updating the JSON and module files, without modifying the core system.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'JSON Example:', 'raw': 'JSON Example:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[', 'raw': '['}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' {', 'raw': ' {'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ""name"": ""analyze_sentiment"",', 'raw': ' ""name"": ""analyze_sentiment"",'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ""module_path"": ""nlp_tools"",', 'raw': ' ""module_path"": ""nlp_tools"",'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ""func_name_in_module"": ""sentiment_analysis"",', 'raw': ' ""func_name_in_module"": ""sentiment_analysis"",'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ""example_usage"": ""analyze_sentiment(text=\\""I love this product!\\"")""', 'raw': ' ""example_usage"": ""analyze_sentiment(text=\\""I love this product!\\"")""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' }', 'raw': ' }'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ']', 'raw': ']'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Why MCO? 💡', 'raw': 'Why MCO? 
💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enhanced Development Efficiency: Developers can focus on their own modules with independent testing and deployment.', 'raw': 'Enhanced Development Efficiency: Developers can focus on their own modules with independent testing and deployment.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Simplified Error Management: Errors remain confined within their modules, enabling quick hotfixes.', 'raw': 'Simplified Error Management: Errors remain confined within their modules, enabling quick hotfixes.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Future-Proofing: With potential features like remote function calls (RPC), access control, auto-documentation, and a function marketplace, MCO paves the way for rapid innovation.', 'raw': 'Future-Proofing: With potential features like remote function calls (RPC), access control, auto-documentation, and a function marketplace, MCO paves the way for rapid innovation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Practical Use & Community 🤝', 'raw': 'Practical Use & Community 🤝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The MCO implementation has been successfully tested on Vidraft’s LLM (based on Google Gemma-3)', 'raw': 'The MCO implementation has been successfully tested on Vidraft’s LLM (based on Google Gemma-3)'}]","Agentic AI Era: Analyzing MCP vs MCO 🚀 + +Hello everyone! +With the rapid advancement of AI agent technology, two architectures have come into the spotlight: MCP (Model Context Protocol) and MCO (Model Context Open-json). Today, we’ll introduce the key features and differences of these two approaches. + +https://huggingface.co/spaces/VIDraft/Agentic-AI-CHAT + +MCP: The Traditional Approach 🏛️ +Centralized Function Registry: All functions are hardcoded into the core system. + +Static Function Definitions & Tight Coupling: New features require changes to the core application code, limiting scalability. + +Monolithic Design: Complex deployment and version management can cause a single error to affect the whole system. + +Code Example: +'''py +FUNCTION_REGISTRY = { + ""existing_function"": existing_function, + ""new_function"": new_function # Adding a new function +} +''' + +MCO: A Revolutionary Approach 🆕 +JSON-based Function Definitions: Function details are stored in external JSON files, enabling dynamic module loading. + +Loose Coupling & Microservices: Each function can be developed, tested, and deployed as an independent module. + +Flexible Scalability: Add new features by simply updating the JSON and module files, without modifying the core system. + +JSON Example: +[ + { + ""name"": ""analyze_sentiment"", + ""module_path"": ""nlp_tools"", + ""func_name_in_module"": ""sentiment_analysis"", + ""example_usage"": ""analyze_sentiment(text=\""I love this product!\"")"" + } +] + +Why MCO? 💡 +Enhanced Development Efficiency: Developers can focus on their own modules with independent testing and deployment. + +Simplified Error Management: Errors remain confined within their modules, enabling quick hotfixes. + +Future-Proofing: With potential features like remote function calls (RPC), access control, auto-documentation, and a function marketplace, MCO paves the way for rapid innovation. 
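To make the dynamic-loading claim above concrete, here is one plausible reading of the MCO pattern in plain Python: each JSON entry is resolved to a callable with importlib, so adding a function means shipping a module plus a JSON entry, with the core loop untouched. The nlp_tools / sentiment_analysis names come from the post's own JSON example; the rest, including writing the module to disk so the sketch runs standalone, is an assumption, not Vidraft's implementation.

```python
import importlib
import json
import pathlib
import sys

# Create the example module on the fly so this sketch is self-contained.
# In a real MCO setup the module would ship as its own deployable unit.
pathlib.Path("nlp_tools.py").write_text(
    "def sentiment_analysis(text):\n"
    "    return 'positive' if 'love' in text else 'neutral'\n"
)
sys.path.insert(0, ".")

REGISTRY_JSON = """
[
  {
    "name": "analyze_sentiment",
    "module_path": "nlp_tools",
    "func_name_in_module": "sentiment_analysis"
  }
]
"""

def load_registry(spec: str) -> dict:
    """Resolve every JSON entry to a callable via dynamic import."""
    registry = {}
    for entry in json.loads(spec):
        module = importlib.import_module(entry["module_path"])
        registry[entry["name"]] = getattr(module, entry["func_name_in_module"])
    return registry

functions = load_registry(REGISTRY_JSON)
print(functions["analyze_sentiment"](text="I love this product!"))  # positive
```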
+ +Practical Use & Community 🤝 +The MCO implementation has been successfully tested on Vidraft’s LLM (based on Google Gemma-3)","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66e54edddba1e4fee4500a5a/MjxRVPfvcGw0JWeKo_oHg.png'}]",[],"[{'reaction': '🔥', 'users': ['openfree', 'seawolf2357', 'micropluxom', 'denothund', 'veidosclicker', 'immunobiotech', 'polototo', 'powergen4ai', 'cosyinstreet', 'doiduom', 'ecoblade', 'kookoiom', 'hl13571', 'hazlegett', 'John6666', 'ginipick', 'fantos', 'aiqtech', 'fantaxy', 'aiqcamp', 'kolaslab', 'cutechicken', 'gunship999', 'Fishtiks', 'aust-t', 'marzwillson', 'dizzyhairnighy', 'Gtuca', 'usefleter', 'Takugen', 'BuiDoan', 'Caprivu', 'maylilyo'], 'count': 33}, {'reaction': '🚀', 'users': ['seawolf2357', 'micropluxom', 'denothund', 'veidosclicker', 'immunobiotech', 'doiduom', 'polototo', 'ecoblade', 'openfree', 'kolaslab', 'gunship999', 'powergen4ai', 'kookoiom', 'ginipick', 'cutechicken', 'aiqcamp', 'dizzyhairnighy', 'aiqtech'], 'count': 18}, {'reaction': '👀', 'users': ['seawolf2357', 'micropluxom', 'denothund', 'openfree', 'kolaslab', 'powergen4ai', 'kookoiom', 'ginipick', 'immunobiotech', 'gunship999', 'cutechicken', 'porkz', 'aiqcamp', 'polototo', 'dizzyhairnighy', 'aiqtech', 'doiduom', 'veidosclicker'], 'count': 18}, {'reaction': '❤️', 'users': ['seawolf2357', 'micropluxom', 'openfree', 'powergen4ai', 'kookoiom', 'ginipick', 'immunobiotech', 'gunship999', 'cutechicken', 'kolaslab', 'aiqcamp', 'denothund', 'polototo', 'dizzyhairnighy', 'jianghu88', 'aiqtech', 'doiduom', 'veidosclicker'], 'count': 18}, {'reaction': '🤗', 'users': ['seawolf2357', 'micropluxom', 'powergen4ai', 'kookoiom', 'openfree', 'aiqcamp', 'denothund', 'polototo', 'immunobiotech', 'ginipick', 'aiqtech', 'doiduom', 'dizzyhairnighy', 'cutechicken', 'veidosclicker'], 'count': 15}, {'reaction': '😎', 'users': ['seawolf2357', 'micropluxom', 'powergen4ai', 'kookoiom', 'openfree', 'aiqcamp', 'denothund', 'polototo', 'immunobiotech', 'aiqtech', 'doiduom', 'dizzyhairnighy', 'ginipick', 'cutechicken', 'veidosclicker'], 'count': 15}, {'reaction': '➕', 'users': ['seawolf2357', 'micropluxom', 'powergen4ai', 'kookoiom', 'openfree', 'aiqcamp', 'denothund', 'polototo', 'immunobiotech', 'aiqtech', 'doiduom', 'dizzyhairnighy', 'ginipick', 'cutechicken', 'veidosclicker'], 'count': 15}, {'reaction': '🧠', 'users': ['seawolf2357', 'powergen4ai', 'kookoiom', 'openfree', 'aiqcamp', 'denothund', 'micropluxom', 'polototo', 'immunobiotech', 'aiqtech', 'doiduom', 'dizzyhairnighy', 'ginipick', 'cutechicken', 'veidosclicker'], 'count': 15}, {'reaction': '🤝', 'users': ['aiqcamp', 'micropluxom', 'polototo', 'powergen4ai', 'DarkoButte', 'immunobiotech', 'doiduom', 'openfree'], 'count': 8}, {'reaction': '👍', 'users': ['powergen4ai', 'micropluxom', 'polototo', 'openfree', 'immunobiotech', 'doiduom', 'ginipick'], 'count': 7}, {'reaction': '😔', 'users': ['aiqcamp', 'micropluxom', 'polototo', 'doiduom', 'openfree', 'powergen4ai'], 'count': 6}, {'reaction': '🤯', 'users': ['cutechicken', 'denothund', 'micropluxom', 'polototo', 'doiduom'], 'count': 5}]",2025-04-14 08:16:53,2025-04-14 08:18:42.889,[],/posts/openfree/305569626054328,8397,"{'language': 'en', 'probability': 0.7801205515861511}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/BapzHmY4xylemybaIjAEn.jpeg,116.0,Joseph Robert Turcotte,Fishtiks,837021531467847,"[{'type': 'text', 'value': ""I want to process AI for free. I know about Hyra AI, Acurast, NATIX, and some other stuff you can do on your phone. 
I mean that I want to process toward your projects for free on my computer. I can do a little now, but I can do much more if I'm able to upgrade (nobody is telling me where they're getting H100s, but I may be able to get custom cards from the source). I was curious if any distributed processing is being done with PC and HPC, like BOINC and Folding@home, but specifically for AI, and I figured this is the place to ask."", 'raw': ""I want to process AI for free. I know about Hyra AI, Acurast, NATIX, and some other stuff you can do on your phone. I mean that I want to process toward your projects for free on my computer. I can do a little now, but I can do much more if I'm able to upgrade (nobody is telling me where they're getting H100s, but I may be able to get custom cards from the source). I was curious if any distributed processing is being done with PC and HPC, like BOINC and Folding@home, but specifically for AI, and I figured this is the place to ask.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""What projects can you recommend to put my CPU and GPU to use until I potentially get a dual CPU, dual to triple custom GPU, custom NPU, and mini-OPU setup, like Jean Zay, but smaller? I don't have that many resources to put to use currently, but I have more than the Androids I'm using for my Aiyara cluster for BOINC, so help me use the gaming PC for something more useful than gaming. I had somewhat promised that I'd offer the new setup to process for others, but I'm starting before I may even get it."", 'raw': ""What projects can you recommend to put my CPU and GPU to use until I potentially get a dual CPU, dual to triple custom GPU, custom NPU, and mini-OPU setup, like Jean Zay, but smaller? I don't have that many resources to put to use currently, but I have more than the Androids I'm using for my Aiyara cluster for BOINC, so help me use the gaming PC for something more useful than gaming. I had somewhat promised that I'd offer the new setup to process for others, but I'm starting before I may even get it.""}]","I want to process AI for free. I know about Hyra AI, Acurast, NATIX, and some other stuff you can do on your phone. I mean that I want to process toward your projects for free on my computer. I can do a little now, but I can do much more if I'm able to upgrade (nobody is telling me where they're getting H100s, but I may be able to get custom cards from the source). I was curious if any distributed processing is being done with PC and HPC, like BOINC and Folding@home, but specifically for AI, and I figured this is the place to ask. + +What projects can you recommend to put my CPU and GPU to use until I potentially get a dual CPU, dual to triple custom GPU, custom NPU, and mini-OPU setup, like Jean Zay, but smaller? I don't have that many resources to put to use currently, but I have more than the Androids I'm using for my Aiyara cluster for BOINC, so help me use the gaming PC for something more useful than gaming. 
I had somewhat promised that I'd offer the new setup to process for others, but I'm starting before I may even get it.",[],[],"[{'reaction': '🚀', 'users': ['tennisb', 'Takugen'], 'count': 2}]",2025-04-13 22:28:46,2025-06-17 17:36:35.004,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '6787f6033d342e4cc8d8c6cc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/BapzHmY4xylemybaIjAEn.jpeg', 'fullname': 'Joseph Robert Turcotte', 'name': 'Fishtiks', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 116, 'isFollowing': False}, {'_id': '67b8e0d79107c46e941a3fc8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/kjcJuaIcvP-1x4Hmf6LMt.png', 'fullname': 'Karen Akers', 'name': 'karenny', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/Fishtiks/837021531467847,1164,"{'language': 'en', 'probability': 0.9776710271835327}",6 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,381466531674007,"[{'type': 'text', 'value': '🇷🇺 Russian Forum Messages Dataset - ', 'raw': '🇷🇺 Russian Forum Messages Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/ruforum'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/ruforum', 'raw': 'https://huggingface.co/datasets/nyuuzyou/ruforum'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection of approximately 58 million Russian forum messages featuring:', 'raw': 'Collection of approximately 58 million Russian forum messages featuring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Complete message content from Russian online forums spanning 2010-2025', 'raw': '- Complete message content from Russian online forums spanning 2010-2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Comprehensive metadata including unique message IDs and timestamps', 'raw': '- Comprehensive metadata including unique message IDs and timestamps'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Full text content preserving original user discussions and interactions', 'raw': '- Full text content preserving original user discussions and interactions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Monolingual dataset focused exclusively on Russian language content', 'raw': '- Monolingual dataset focused exclusively on Russian language content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This dataset offers a unique textual archive of Russian online conversations suitable for text generation, sentiment analysis, and language modeling research. Released to the public domain under CC0 1.0 license.', 'raw': 'This dataset offers a unique textual archive of Russian online conversations suitable for text generation, sentiment analysis, and language modeling research. 
Released to the public domain under CC0 1.0 license.'}]","🇷🇺 Russian Forum Messages Dataset - https://huggingface.co/datasets/nyuuzyou/ruforum + +Collection of approximately 58 million Russian forum messages featuring: + +- Complete message content from Russian online forums spanning 2010-2025 +- Comprehensive metadata including unique message IDs and timestamps +- Full text content preserving original user discussions and interactions +- Monolingual dataset focused exclusively on Russian language content + +This dataset offers a unique textual archive of Russian online conversations suitable for text generation, sentiment analysis, and language modeling research. Released to the public domain under CC0 1.0 license.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'langutang', 'Americana11', 'Mindweller', 'iqbaloutlaw', 'Rexschwert', 'AlexWortega', 'vatolinalex', 'victor', 'Takugen', 'KaraKaraWitch'], 'count': 11}, {'reaction': '🔥', 'users': ['sudanenator', 'Rexschwert', 'AlexWortega', 'KaraKaraWitch', 'Hamzah-Asadullah', 'cahlen'], 'count': 6}]",2025-04-13 15:25:37,2025-04-13 16:11:14.278,[],/posts/nyuuzyou/381466531674007,5629,"{'language': 'en', 'probability': 0.7245180010795593}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/639ee12f0d679f53942e060f/wqAyuJ9kUtBzerdkc2-I9.jpeg,5.0,Katsuki-san Bakugou,katsukiai,571318779153247,"[{'type': 'text', 'value': 'DeepFocus datasets are not allowed to be used in cases where ', 'raw': 'DeepFocus datasets are not allowed to be used in cases where '}, {'type': 'inline_code', 'code': 'mean', 'raw': '`mean`'}, {'type': 'text', 'value': ' is used in that dataset', 'raw': ' is used in that dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ""Why?\n├── This discussion is comments by the user. https://huggingface.co/JLouisBiz\n├── Hello,\n├── As a fork of a DeepSeek, you are required to give credit to DeepSeek according to the original MIT license. Could you please look into licensing terms and comply please?\n├── I also do not see why are you making your own license, why don't you simple leave it with original MIT license?\n└── I see that your license is also free software, but it brings legal problems when you are changing license, you are free to sublicense MIT licensed software, but re-licensing it without complying to initial terms is not allowed.\nUnlicensed\n├── DeepFocus\n├── Wrong license and using modified license (Unpaper provided @aide-julius)\n└── The dataset with the modified license does not use the same license as DeepSeek is using, EOS this license\nSymbol\n└── EOS\n └── End of service"", 'raw': ""```\nWhy?\n├── This discussion is comments by the user. https://huggingface.co/JLouisBiz\n├── Hello,\n├── As a fork of a DeepSeek, you are required to give credit to DeepSeek according to the original MIT license. 
Could you please look into licensing terms and comply please?\n├── I also do not see why are you making your own license, why don't you simple leave it with original MIT license?\n└── I see that your license is also free software, but it brings legal problems when you are changing license, you are free to sublicense MIT licensed software, but re-licensing it without complying to initial terms is not allowed.\nUnlicensed\n├── DeepFocus\n├── Wrong license and using modified license (Unpaper provided @aide-julius)\n└── The dataset with the modified license does not use the same license as DeepSeek is using, EOS this license\nSymbol\n└── EOS\n └── End of service\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thank you,', 'raw': 'Thank you,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Best Regards,', 'raw': 'Best Regards,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sam from The KatsukiAI Team', 'raw': 'Sam from The KatsukiAI Team'}]","DeepFocus datasets are not allowed to be used in cases where `mean` is used in that dataset + +``` +Why? +├── This discussion is comments by the user. https://huggingface.co/JLouisBiz +├── Hello, +├── As a fork of a DeepSeek, you are required to give credit to DeepSeek according to the original MIT license. Could you please look into licensing terms and comply please? +├── I also do not see why are you making your own license, why don't you simple leave it with original MIT license? +└── I see that your license is also free software, but it brings legal problems when you are changing license, you are free to sublicense MIT licensed software, but re-licensing it without complying to initial terms is not allowed. +Unlicensed +├── DeepFocus +├── Wrong license and using modified license (Unpaper provided @aide-julius) +└── The dataset with the modified license does not use the same license as DeepSeek is using, EOS this license +Symbol +└── EOS + └── End of service +``` +Thank you, +Best Regards, +Sam from The KatsukiAI Team",[],"[{'_id': '67b54c2b84e6900ef1a43254', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/i9r3Yzv9wyerqqtynEPYP.png', 'fullname': 'Aiden', 'name': 'aide-julius', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '🧠', 'users': ['John6666', 'katsukiai', 'Takugen', 'kulia-moon'], 'count': 4}]",2025-04-13 13:59:05,2025-04-13 19:11:10.892,"[{'_id': '639ee12f0d679f53942e060f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/639ee12f0d679f53942e060f/wqAyuJ9kUtBzerdkc2-I9.jpeg', 'fullname': 'Katsuki-san Bakugou', 'name': 'katsukiai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/katsukiai/571318779153247,4076,"{'language': 'en', 'probability': 0.9165529608726501}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1652986473945-60f2e74cadf471cbdf8bb663.jpeg,71.0,Rajiv Shah,rajistics,872485108771156,"[{'type': 'text', 'value': 'Having some fun with long context benchmarks (watch the video!!) 
', 'raw': 'Having some fun with long context benchmarks (watch the video!!) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'NoLiMA: ', 'raw': 'NoLiMA: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.05167'}, 'url': 'https://huggingface.co/papers/2502.05167', 'raw': 'https://huggingface.co/papers/2502.05167', 'label': 'NoLiMa: Long-Context Evaluation Beyond Literal Matching (2502.05167)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fiction LiveBench: ', 'raw': 'Fiction LiveBench: '}, {'type': 'link', 'href': 'https://fiction.live/stories/Fiction-liveBench-Mar-25-2025/oQdzQvKHw8JyXbN87', 'raw': 'https://fiction.live/stories/Fiction-liveBench-Mar-25-2025/oQdzQvKHw8JyXbN87'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Michelangelo: ', 'raw': 'Michelangelo: '}, {'type': 'link', 'href': 'https://deepmind.google/research/publications/117639/', 'raw': 'https://deepmind.google/research/publications/117639/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LongGenBench: ', 'raw': 'LongGenBench: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2409.02076'}, 'url': 'https://huggingface.co/papers/2409.02076', 'raw': 'https://huggingface.co/papers/2409.02076', 'label': 'Spinning the Golden Thread: Benchmarking Long-Form Generation in\n Language Models (2409.02076)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'NeedleBench: ', 'raw': 'NeedleBench: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2407.11963'}, 'url': 'https://huggingface.co/papers/2407.11963', 'raw': 'https://huggingface.co/papers/2407.11963', 'label': 'NeedleBench: Can LLMs Do Retrieval and Reasoning in 1 Million Context\n Window? (2407.11963)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'RULER: ', 'raw': 'RULER: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.06654'}, 'url': 'https://huggingface.co/papers/2404.06654', 'raw': 'https://huggingface.co/papers/2404.06654', 'label': ""RULER: What's the Real Context Size of Your Long-Context Language\n Models? (2404.06654)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For more: ', 'raw': 'For more: '}, {'type': 'link', 'href': 'https://www.reddit.com/r/rajistics/comments/1jxwk29/long_context_llm_benchmarks_video/', 'raw': 'https://www.reddit.com/r/rajistics/comments/1jxwk29/long_context_llm_benchmarks_video/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' let me know if you like these posts', 'raw': ' let me know if you like these posts'}]","Having some fun with long context benchmarks (watch the video!!) 
+ +NoLiMA: https://huggingface.co/papers/2502.05167 +Fiction LiveBench: https://fiction.live/stories/Fiction-liveBench-Mar-25-2025/oQdzQvKHw8JyXbN87 +Michelangelo: https://deepmind.google/research/publications/117639/ +LongGenBench: https://huggingface.co/papers/2409.02076 +NeedleBench: https://huggingface.co/papers/2407.11963 +RULER: https://huggingface.co/papers/2404.06654 + +For more: https://www.reddit.com/r/rajistics/comments/1jxwk29/long_context_llm_benchmarks_video/ + + let me know if you like these posts","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f2e74cadf471cbdf8bb663/pzQINr0VGk4pV0Pg8LzVg.mp4'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'Fishtiks', 'rjmehta', 'uretzky'], 'count': 4}]",2025-04-13 13:16:39,2025-04-13 13:17:08.844,[],/posts/rajistics/872485108771156,3509,"{'language': 'en', 'probability': 0.6296384930610657}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,504548986821494,"[{'type': 'text', 'value': '16 new studies on inference-time scaling:', 'raw': '16 new studies on inference-time scaling:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""For the last couple of weeks a large number of studies on inference-time scaling have emerged. And it's so cool, because each new paper adds a trick to the toolbox, making LLMs more capable without needing to scale parameter count of the models."", 'raw': ""For the last couple of weeks a large number of studies on inference-time scaling have emerged. And it's so cool, because each new paper adds a trick to the toolbox, making LLMs more capable without needing to scale parameter count of the models.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So here are 13 new methods + 3 comprehensive studies on test-time scaling:', 'raw': 'So here are 13 new methods + 3 comprehensive studies on test-time scaling:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. ', 'raw': '1. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2504.02495'}, 'url': 'https://huggingface.co/papers/2504.02495', 'raw': 'https://huggingface.co/papers/2504.02495', 'label': 'Inference-Time Scaling for Generalist Reward Modeling (2504.02495)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Probably, the most popular study. It proposes to boost inference-time scalability by improving reward modeling. To enhance performance, DeepSeek-GRM uses adaptive critiques, parallel sampling, pointwise generative RM, and Self-Principled Critique Tuning (SPCT)', 'raw': 'Probably, the most popular study. It proposes to boost inference-time scalability by improving reward modeling. To enhance performance, DeepSeek-GRM uses adaptive critiques, parallel sampling, pointwise generative RM, and Self-Principled Critique Tuning (SPCT)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. ', 'raw': '2. 
'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2504.04718'}, 'url': 'https://huggingface.co/papers/2504.04718', 'raw': 'https://huggingface.co/papers/2504.04718', 'label': 'T1: Tool-integrated Self-verification for Test-time Compute Scaling in\n Small Language Models (2504.04718)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Allows small models to use external tools, like code interpreters and calculator, to enhance self-verification', 'raw': 'Allows small models to use external tools, like code interpreters and calculator, to enhance self-verification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. ', 'raw': '3. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2504.00810'}, 'url': 'https://huggingface.co/papers/2504.00810', 'raw': 'https://huggingface.co/papers/2504.00810', 'label': 'Z1: Efficient Test-time Scaling with Code (2504.00810)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Proposes to train LLMs on code-based reasoning paths to make test-time scaling more efficient, limiting unnecessary tokens with a special dataset and a Shifted Thinking Window', 'raw': 'Proposes to train LLMs on code-based reasoning paths to make test-time scaling more efficient, limiting unnecessary tokens with a special dataset and a Shifted Thinking Window'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. ', 'raw': '4. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2504.00891'}, 'url': 'https://huggingface.co/papers/2504.00891', 'raw': 'https://huggingface.co/papers/2504.00891', 'label': 'GenPRM: Scaling Test-Time Compute of Process Reward Models via\n Generative Reasoning (2504.00891)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Introduces GenPRM, a generative PRM, that uses CoT reasoning and code verification for step-by-step judgment. With only 23K training examples, GenPRM outperforms prior PRMs and larger models', 'raw': 'Introduces GenPRM, a generative PRM, that uses CoT reasoning and code verification for step-by-step judgment. With only 23K training examples, GenPRM outperforms prior PRMs and larger models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. ', 'raw': '5. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2503.24320'}, 'url': 'https://huggingface.co/papers/2503.24320', 'raw': 'https://huggingface.co/papers/2503.24320', 'label': 'Can Test-Time Scaling Improve World Foundation Model? (2503.24320)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""SWIFT test-time scaling framework improves World Models' performance without retraining, using strategies like fast tokenization, Top-K pruning, and efficient beam search"", 'raw': ""SWIFT test-time scaling framework improves World Models' performance without retraining, using strategies like fast tokenization, Top-K pruning, and efficient beam search""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6. ', 'raw': '6. 
'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2504.07104'}, 'url': 'https://huggingface.co/papers/2504.07104', 'raw': 'https://huggingface.co/papers/2504.07104', 'label': ""Relevance Isn't All You Need: Scaling RAG Systems With Inference-Time\n Compute Via Multi-Criteria Reranking (2504.07104)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Proposes REBEL for RAG systems scaling, which uses multi-criteria optimization with CoT prompting for better performance-speed tradeoffs as inference compute increases', 'raw': 'Proposes REBEL for RAG systems scaling, which uses multi-criteria optimization with CoT prompting for better performance-speed tradeoffs as inference compute increases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7. ', 'raw': '7. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2503.13288'}, 'url': 'https://huggingface.co/papers/2503.13288', 'raw': 'https://huggingface.co/papers/2503.13288', 'label': '$φ$-Decoding: Adaptive Foresight Sampling for Balanced Inference-Time\n Exploration and Exploitation (2503.13288)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Proposes a φ-Decoding strategy that uses foresight sampling, clustering and adaptive pruning to estimate and select optimal reasoning steps', 'raw': 'Proposes a φ-Decoding strategy that uses foresight sampling, clustering and adaptive pruning to estimate and select optimal reasoning steps'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read further below 👇', 'raw': 'Read further below 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also, subscribe to the Turing Post ', 'raw': 'Also, subscribe to the Turing Post '}, {'type': 'link', 'href': 'https://www.turingpost.com/subscribe', 'raw': 'https://www.turingpost.com/subscribe'}]","16 new studies on inference-time scaling: + +For the last couple of weeks a large number of studies on inference-time scaling have emerged. And it's so cool, because each new paper adds a trick to the toolbox, making LLMs more capable without needing to scale parameter count of the models. + +So here are 13 new methods + 3 comprehensive studies on test-time scaling: + +1. https://huggingface.co/papers/2504.02495 +Probably, the most popular study. It proposes to boost inference-time scalability by improving reward modeling. To enhance performance, DeepSeek-GRM uses adaptive critiques, parallel sampling, pointwise generative RM, and Self-Principled Critique Tuning (SPCT) + +2. https://huggingface.co/papers/2504.04718 +Allows small models to use external tools, like code interpreters and calculator, to enhance self-verification + +3. https://huggingface.co/papers/2504.00810 +Proposes to train LLMs on code-based reasoning paths to make test-time scaling more efficient, limiting unnecessary tokens with a special dataset and a Shifted Thinking Window + +4. https://huggingface.co/papers/2504.00891 +Introduces GenPRM, a generative PRM, that uses CoT reasoning and code verification for step-by-step judgment. With only 23K training examples, GenPRM outperforms prior PRMs and larger models + +5. 
https://huggingface.co/papers/2503.24320 +SWIFT test-time scaling framework improves World Models' performance without retraining, using strategies like fast tokenization, Top-K pruning, and efficient beam search + +6. https://huggingface.co/papers/2504.07104 +Proposes REBEL for RAG systems scaling, which uses multi-criteria optimization with CoT prompting for better performance-speed tradeoffs as inference compute increases + +7. https://huggingface.co/papers/2503.13288 +Proposes a φ-Decoding strategy that uses foresight sampling, clustering and adaptive pruning to estimate and select optimal reasoning steps + +Read further below 👇 + +Also, subscribe to the Turing Post https://www.turingpost.com/subscribe",[],[],"[{'reaction': '👍', 'users': ['iseesaw', 'John6666', 'Fishtiks', 'cxdv', 'Weiyun1025', 'Hwanjun', 'Dcas89', 'Kseniase', 'onthemove31', 'JohnRoger', 'owao'], 'count': 11}, {'reaction': '🔥', 'users': ['gmh14', 'JohnRoger'], 'count': 2}, {'reaction': '❤️', 'users': ['DarkoButte'], 'count': 1}]",2025-04-13 10:50:37,2025-04-14 07:50:32.244,"[{'_id': '64838b28c235ef76b63e4999', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg', 'fullname': 'Ksenia Se', 'name': 'Kseniase', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 971, 'isFollowing': False}, {'_id': '619507e7b74b6c591f794340', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/619507e7b74b6c591f794340/JbPDoy6Ko1V1-6oJJwFV8.jpeg', 'fullname': 'Weiyun Wang', 'name': 'Weiyun1025', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 24, 'isFollowing': False}]",/posts/Kseniase/504548986821494,5594,"{'language': 'en', 'probability': 0.7886908650398254}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,178119947720343,"[{'type': 'text', 'value': 'I just compared tasks with different input/output lengths. CPU/GPU performances are very different here.', 'raw': 'I just compared tasks with different input/output lengths. CPU/GPU performances are very different here.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The LLMs we use today are autoregressive or causal inference models, meaning the generation of each output token depends on all previous tokens. Since the model must generate one token at a time, it sets a hard limit on parallelism. The chatbot simulating human typing is in fact a UI trick to gloss over this fundamental limit. This is great news for CPUs because it levels the playing field.', 'raw': 'The LLMs we use today are autoregressive or causal inference models, meaning the generation of each output token depends on all previous tokens. Since the model must generate one token at a time, it sets a hard limit on parallelism. The chatbot simulating human typing is in fact a UI trick to gloss over this fundamental limit. This is great news for CPUs because it levels the playing field.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""But when processing input tokens, this limit doesn't exist. The GPU can fire up thousands of cores (vs dozens of CPU cores) to process as many input tokens as it can, all at once. Here, GPU enjoys a significant speed margin over CPU. 
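+A rough timing sketch of this prefill/decode asymmetry (the model choice and prompt length below are placeholders, not from the post): prefill handles all input tokens in parallel, while decode emits one token at a time.
+```py
+import time
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+tok = AutoTokenizer.from_pretrained("gpt2")
+model = AutoModelForCausalLM.from_pretrained("gpt2")
+inputs = tok("word " * 400, return_tensors="pt")  # long prompt to stress prefill
+
+t0 = time.perf_counter()
+model.generate(**inputs, max_new_tokens=1)   # ~ prefill + 1 decode step
+t1 = time.perf_counter()
+model.generate(**inputs, max_new_tokens=64)  # ~ prefill + 64 decode steps
+t2 = time.perf_counter()
+
+per_token = ((t2 - t1) - (t1 - t0)) / 63     # ~ 63 extra decode steps
+print(f"prefill+1 token: {t1 - t0:.2f}s, decode: {per_token * 1000:.0f} ms/token")
+```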
The longer the prompt, the bigger the margin."", 'raw': ""But when processing input tokens, this limit doesn't exist. The GPU can fire up thousands of cores (vs dozens of CPU cores) to process as many input tokens as it can, all at once. Here, GPU enjoys a significant speed margin over CPU. The longer the prompt, the bigger the margin.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So, when it comes to user experience, both GPU and CPU can output text at decent speed. What really distinguishes them is the initial wait time, i.e. prompt processing delay.', 'raw': 'So, when it comes to user experience, both GPU and CPU can output text at decent speed. What really distinguishes them is the initial wait time, i.e. prompt processing delay.'}]","I just compared tasks with different input/output lengths. CPU/GPU performances are very different here. + +The LLMs we use today are autoregressive or causal inference models, meaning the generation of each output token depends on all previous tokens. Since the model must generate one token at a time, it sets a hard limit on parallelism. The chatbot simulating human typing is in fact a UI trick to gloss over this fundamental limit. This is great news for CPUs because it levels the playing field. + +But when processing input tokens, this limit doesn't exist. The GPU can fire up thousands of cores (vs dozens of CPU cores) to process as many input tokens as it can, all at once. Here, GPU enjoys a significant speed margin over CPU. The longer the prompt, the bigger the margin. + +So, when it comes to user experience, both GPU and CPU can output text at decent speed. What really distinguishes them is the initial wait time, i.e. prompt processing delay.",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-04-13 07:00:17,2025-04-13 07:12:42.502,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/onekq/178119947720343,702,"{'language': 'en', 'probability': 0.9029545187950134}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/YUSxG-V5LdEjbAjxLwMZg.png,,Alpel Golden,App-Girl,315609622457956,"[{'type': 'text', 'value': 'The Rise of AI-Powered Dating: Revolutionizing Relationships or Playing with Human Emotions?', 'raw': 'The Rise of AI-Powered Dating: Revolutionizing Relationships or Playing with Human Emotions?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI in dating apps is transforming how we meet people. From personalized recommendations to virtual assistants that help navigate conversations, technology is playing a growing role. But is this reliance on algorithms taking the “human” out of relationships?', 'raw': 'AI in dating apps is transforming how we meet people. From personalized recommendations to virtual assistants that help navigate conversations, technology is playing a growing role. 
But is this reliance on algorithms taking the “human” out of relationships?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Pros:', 'raw': 'Pros:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Improved matchmaking: AI can analyze preferences, behaviors, and even conversations to suggest more compatible partners.', 'raw': 'Improved matchmaking: AI can analyze preferences, behaviors, and even conversations to suggest more compatible partners.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Less judgment: AI removes initial biases based on appearance and first impressions.', 'raw': 'Less judgment: AI removes initial biases based on appearance and first impressions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enhanced personalization: Apps can refine recommendations based on deeper preferences.', 'raw': 'Enhanced personalization: Apps can refine recommendations based on deeper preferences.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cons:', 'raw': 'Cons:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lack of true emotional connection: Can an algorithm truly understand human feelings?', 'raw': 'Lack of true emotional connection: Can an algorithm truly understand human feelings?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Over-reliance on AI: What happens when algorithms miss the subtleties of human attraction?', 'raw': 'Over-reliance on AI: What happens when algorithms miss the subtleties of human attraction?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Privacy concerns: With AI processing personal data, there are fears about how this information is used.', 'raw': 'Privacy concerns: With AI processing personal data, there are fears about how this information is used.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What do you think? Are we heading toward a more efficient way of dating, or are we sacrificing authenticity for convenience? Let’s discuss.', 'raw': 'What do you think? Are we heading toward a more efficient way of dating, or are we sacrificing authenticity for convenience? Let’s discuss.'}]","The Rise of AI-Powered Dating: Revolutionizing Relationships or Playing with Human Emotions? + +AI in dating apps is transforming how we meet people. From personalized recommendations to virtual assistants that help navigate conversations, technology is playing a growing role. But is this reliance on algorithms taking the “human” out of relationships? + +Pros: + +Improved matchmaking: AI can analyze preferences, behaviors, and even conversations to suggest more compatible partners. + +Less judgment: AI removes initial biases based on appearance and first impressions. + +Enhanced personalization: Apps can refine recommendations based on deeper preferences. + +Cons: + +Lack of true emotional connection: Can an algorithm truly understand human feelings? + +Over-reliance on AI: What happens when algorithms miss the subtleties of human attraction? + +Privacy concerns: With AI processing personal data, there are fears about how this information is used. + +What do you think? 
Are we heading toward a more efficient way of dating, or are we sacrificing authenticity for convenience? Let’s discuss.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/67eeeddfbb8be8c19de22804/zGicm3O8Vc6Iy-9JO8fG5.png'}]",[],"[{'reaction': '🚀', 'users': ['dantezxcd', 'John6666', 'App-Girl', 'ChrisSacrumCor'], 'count': 4}]",2025-04-08 16:15:15,2025-04-09 22:18:51.532,"[{'_id': '67520ca3d2cf909e599f16f1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/xoaFkGKN3aINVctYC93GX.png', 'fullname': 'bri', 'name': 'edwarddddr', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '677c781d195b961b77455900', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/7MO4HkwJOLCpfEwVX8d_D.png', 'fullname': 'Thomas Lee', 'name': 'tomlee3ddesign', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/App-Girl/315609622457956,1897,"{'language': 'en', 'probability': 0.8965231776237488}",2 +/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg,46.0,Emin Temiz,etemiz,366847320511575,"[{'type': 'text', 'value': 'Initial AHA benchmark of Llama 4 Scout puts it in between Command R+ 1 and DeepSeek V3 0324. More numbers later when I do finer benchmark with more updated inference engines.', 'raw': 'Initial AHA benchmark of Llama 4 Scout puts it in between Command R+ 1 and DeepSeek V3 0324. More numbers later when I do finer benchmark with more updated inference engines.'}]",Initial AHA benchmark of Llama 4 Scout puts it in between Command R+ 1 and DeepSeek V3 0324. More numbers later when I do finer benchmark with more updated inference engines.,[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-04-08 16:12:05,2025-04-08 16:12:05.846,[],/posts/etemiz/366847320511575,585,"{'language': 'en', 'probability': 0.8022746443748474}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/60a76b174e24361791fe822d/inEvYwrd4z0xvRQN3ikdE.jpeg,160.0,Sylvain Lesage,severo,432555815225192,"[{'type': 'text', 'value': 'Need to convert CSV to Parquet?', 'raw': 'Need to convert CSV to Parquet?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use ', 'raw': 'Use '}, {'type': 'link', 'href': 'https://www.chatdb.ai/tools/csv-to-parquet-converter', 'raw': 'https://www.chatdb.ai/tools/csv-to-parquet-converter'}, {'type': 'text', 'value': '. It does the job instantly.', 'raw': '. It does the job instantly.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'cfahlgren1', 'raw': '@cfahlgren1'}, {'type': 'text', 'value': ' provides many other tools on his website. Approved and bookmarked!', 'raw': ' provides many other tools on his website. Approved and bookmarked!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Need to convert CSV to Parquet? + +Use https://www.chatdb.ai/tools/csv-to-parquet-converter. It does the job instantly. + +@cfahlgren1 provides many other tools on his website. Approved and bookmarked! 
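+For anyone who prefers to do the same conversion locally, a minimal sketch (file names are placeholders; assumes pandas with a Parquet engine such as pyarrow installed):
+```py
+import pandas as pd
+
+df = pd.read_csv("posts.csv")    # any input CSV
+df.to_parquet("posts.parquet")   # columnar, compressed output
+print(f"wrote {len(df)} rows")
+```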
+ ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60a76b174e24361791fe822d/809eeGE2cSr5CjsjrWJiS.png'}]","[{'_id': '648a374f00f7a3374ee64b99', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/648a374f00f7a3374ee64b99/YPwSOrronoozwHbJchPn3.jpeg', 'fullname': 'Caleb Fahlgren', 'name': 'cfahlgren1', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 246}]","[{'reaction': '👀', 'users': ['John6666', 'dantezxcd'], 'count': 2}, {'reaction': '🚀', 'users': ['akahana'], 'count': 1}]",2025-04-08 15:19:49,2025-04-08 15:48:11.164,"[{'_id': '648a374f00f7a3374ee64b99', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/648a374f00f7a3374ee64b99/YPwSOrronoozwHbJchPn3.jpeg', 'fullname': 'Caleb Fahlgren', 'name': 'cfahlgren1', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 246, 'isFollowing': False}, {'_id': '60a76b174e24361791fe822d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60a76b174e24361791fe822d/inEvYwrd4z0xvRQN3ikdE.jpeg', 'fullname': 'Sylvain Lesage', 'name': 'severo', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 160, 'isFollowing': False}]",/posts/severo/432555815225192,2016,"{'language': 'en', 'probability': 0.897085964679718}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6758312797bb6949e67d4a47/bBAysCFgr32JZu4j_Ry1J.jpeg,18.0,daavoo,daavoo,485479652965540,"[{'type': 'text', 'value': 'After working on agent evaluation🔍🤖 the last weeks, we started to accumulate code to make trying different agent frameworks easier. From that code, we have built and just released a small library called ', 'raw': 'After working on agent evaluation🔍🤖 the last weeks, we started to accumulate code to make trying different agent frameworks easier. From that code, we have built and just released a small library called '}, {'type': 'inline_code', 'code': 'any-agent', 'raw': '`any-agent`'}, {'type': 'text', 'value': '. ', 'raw': '. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Give it a try and a ⭐: ', 'raw': 'Give it a try and a ⭐: '}, {'type': 'link', 'href': 'https://github.com/mozilla-ai/any-agent', 'raw': 'https://github.com/mozilla-ai/any-agent'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'py', 'code': 'from any_agent import AgentConfig, AgentFramework, AnyAgent\n\nagent = AnyAgent.create(\n AgentFramework(""smolagents""), # or openai, langchain, llama_index\n AgentConfig(\n model_id=""gpt-4o-mini""\n )\n)\nagent.run(""Which Agent Framework is the best??"")', 'raw': '```py\nfrom any_agent import AgentConfig, AgentFramework, AnyAgent\n\nagent = AnyAgent.create(\n AgentFramework(""smolagents""), # or openai, langchain, llama_index\n AgentConfig(\n model_id=""gpt-4o-mini""\n )\n)\nagent.run(""Which Agent Framework is the best??"")\n```'}]","After working on agent evaluation🔍🤖 the last weeks, we started to accumulate code to make trying different agent frameworks easier. From that code, we have built and just released a small library called `any-agent`. 
+ + +Give it a try and a ⭐: https://github.com/mozilla-ai/any-agent + +```py +from any_agent import AgentConfig, AgentFramework, AnyAgent + +agent = AnyAgent.create( + AgentFramework(""smolagents""), # or openai, langchain, llama_index + AgentConfig( + model_id=""gpt-4o-mini"" + ) +) +agent.run(""Which Agent Framework is the best??"") +```",[],[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd', 'shanezdz'], 'count': 3}, {'reaction': '🔥', 'users': ['calmodovar'], 'count': 1}]",2025-04-08 15:07:31,2025-04-10 09:20:25.806,[],/posts/daavoo/485479652965540,1837,"{'language': 'en', 'probability': 0.6880764961242676}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/617e7dbd129c9e67703ffe62/nJD_rI351t2c2QN_BpUBz.jpeg,41.0,Fatih C. Akyon,fcakyon,248454580146320,"[{'type': 'text', 'value': '🎉 GitHub selected the ultralytics computer vision project, known for its YOLOv8/YOLO11 real-time SOTA computer vision models, as one of the top 5 open-source projects for first-time contributors in 2024!', 'raw': '🎉 GitHub selected the ultralytics computer vision project, known for its YOLOv8/YOLO11 real-time SOTA computer vision models, as one of the top 5 open-source projects for first-time contributors in 2024!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the project: ', 'raw': 'Link to the project: '}, {'type': 'link', 'href': 'https://github.com/ultralytics/ultralytics', 'raw': 'https://github.com/ultralytics/ultralytics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the full GitHub 2024 recap report: ', 'raw': 'Link to the full GitHub 2024 recap report: '}, {'type': 'link', 'href': 'https://github.blog/news-insights/octoverse/octoverse-2024/', 'raw': 'https://github.blog/news-insights/octoverse/octoverse-2024/'}]","🎉 GitHub selected the ultralytics computer vision project, known for its YOLOv8/YOLO11 real-time SOTA computer vision models, as one of the top 5 open-source projects for first-time contributors in 2024! 
+ +Link to the project: https://github.com/ultralytics/ultralytics + +Link to the full GitHub 2024 recap report: https://github.blog/news-insights/octoverse/octoverse-2024/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/617e7dbd129c9e67703ffe62/IWaR9imOUBmAUNflg_QpR.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'glenn-jocher', 'fcakyon', 'dantezxcd', 'pabloce', 'syun88', 'muhammadrizwanmunawar'], 'count': 7}, {'reaction': '❤️', 'users': ['glenn-jocher', 'fcakyon', 'dantezxcd', 'muhammadrizwanmunawar', 'fernando-bold', 'syun88'], 'count': 6}, {'reaction': '🚀', 'users': ['glenn-jocher', 'fcakyon', 'muhammadrizwanmunawar', 'TheSunIsRising'], 'count': 4}, {'reaction': '👀', 'users': ['glenn-jocher', 'fcakyon', 'muhammadrizwanmunawar'], 'count': 3}, {'reaction': '🤗', 'users': ['glenn-jocher', 'fcakyon', 'muhammadrizwanmunawar'], 'count': 3}, {'reaction': '🧠', 'users': ['muhammadrizwanmunawar', 'fernando-bold', 'fcakyon'], 'count': 3}]",2025-04-08 14:36:55,2025-04-09 04:39:50.732,"[{'_id': '60ad037a306d6873ec42d537', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60ad037a306d6873ec42d537/OHqdqRzPJA9zkEfKHpxIT.png', 'fullname': 'Glenn Jocher', 'name': 'glenn-jocher', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '63f268560a16587ea96a6a34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63f268560a16587ea96a6a34/4h-p4SK-ONJmyCrsLInYW.jpeg', 'fullname': 'Muhammad Rizwan Munawar', 'name': 'muhammadrizwanmunawar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/fcakyon/248454580146320,2668,"{'language': 'en', 'probability': 0.8353744149208069}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/665f02b35dbb28742489d3b1/JVK7VKRWI6wJwX2uKKV3F.png,70.0,Eric Chung,DawnC,553378321840890,"[{'type': 'text', 'value': 'New in PawMatchAI🐾 : Turn Your Dog Photos into Art!', 'raw': 'New in PawMatchAI🐾 : Turn Your Dog Photos into Art!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I’m excited to introduce a brand-new creative feature — Dog Style Transfer is now live on PawMatchAI!', 'raw': 'I’m excited to introduce a brand-new creative feature — Dog Style Transfer is now live on PawMatchAI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just upload your dog’s photo and transform it into 5 artistic styles:', 'raw': 'Just upload your dog’s photo and transform it into 5 artistic styles:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌸 Japanese Anime', 'raw': '🌸 Japanese Anime'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 Classic Cartoon', 'raw': '📚 Classic Cartoon'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖼️ Oil Painting', 'raw': '🖼️ Oil Painting'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Watercolor', 'raw': '🎨 Watercolor'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌆 Cyberpunk', 'raw': '🌆 Cyberpunk'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All powered by Stable Diffusion and enhanced with smart prompt tuning to preserve your dog’s unique traits and breed identity , so the artwork stays true to your furry friend.', 'raw': 'All powered by Stable Diffusion and enhanced with smart prompt tuning to preserve 
your dog’s unique traits and breed identity , so the artwork stays true to your furry friend.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Whether you're creating a custom portrait or just having fun, this feature brings your pet photos to life in completely new ways."", 'raw': ""Whether you're creating a custom portrait or just having fun, this feature brings your pet photos to life in completely new ways.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And here’s a little secret: although it’s designed with dogs in mind, it actually works on any photo — cats, plush toys, even humans. Feel free to experiment!', 'raw': 'And here’s a little secret: although it’s designed with dogs in mind, it actually works on any photo — cats, plush toys, even humans. Feel free to experiment!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Results may not always be perfectly accurate, sometimes your photo might come back looking a little different, or even beyond your imagination. But that’s part of the fun! It’s all about creative surprises and letting the AI do its thing.', 'raw': 'Results may not always be perfectly accurate, sometimes your photo might come back looking a little different, or even beyond your imagination. But that’s part of the fun! It’s all about creative surprises and letting the AI do its thing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it now: ', 'raw': 'Try it now: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DawnC/PawMatchAI'}, 'url': 'https://huggingface.co/spaces/DawnC/PawMatchAI', 'raw': 'https://huggingface.co/spaces/DawnC/PawMatchAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If this new feature made you smile, a ❤️ for this space would mean a lot.', 'raw': 'If this new feature made you smile, a ❤️ for this space would mean a lot.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AIArt #StyleTransfer #StableDiffusion #ComputerVision #MachineLearning #DeepLearning', 'raw': '#AIArt #StyleTransfer #StableDiffusion #ComputerVision #MachineLearning #DeepLearning'}]","New in PawMatchAI🐾 : Turn Your Dog Photos into Art! + +I’m excited to introduce a brand-new creative feature — Dog Style Transfer is now live on PawMatchAI! + +Just upload your dog’s photo and transform it into 5 artistic styles: +🌸 Japanese Anime +📚 Classic Cartoon +🖼️ Oil Painting +🎨 Watercolor +🌆 Cyberpunk + +All powered by Stable Diffusion and enhanced with smart prompt tuning to preserve your dog’s unique traits and breed identity , so the artwork stays true to your furry friend. + +Whether you're creating a custom portrait or just having fun, this feature brings your pet photos to life in completely new ways. + +And here’s a little secret: although it’s designed with dogs in mind, it actually works on any photo — cats, plush toys, even humans. Feel free to experiment! + +Results may not always be perfectly accurate, sometimes your photo might come back looking a little different, or even beyond your imagination. But that’s part of the fun! It’s all about creative surprises and letting the AI do its thing. 
+ +Try it now: https://huggingface.co/spaces/DawnC/PawMatchAI + +If this new feature made you smile, a ❤️ for this space would mean a lot. + +#AIArt #StyleTransfer #StableDiffusion #ComputerVision #MachineLearning #DeepLearning",[],[],"[{'reaction': '🔥', 'users': ['Isochris', 'John6666', 'nyuuzyou', 'svjack', 'Hurricane79', 'wind6012'], 'count': 6}, {'reaction': '🚀', 'users': ['Axe77', 'dantezxcd', 'svjack'], 'count': 3}, {'reaction': '❤️', 'users': ['John6666', 'dantezxcd', 'svjack'], 'count': 3}, {'reaction': '🤗', 'users': ['JLouisBiz', 'svjack'], 'count': 2}]",2025-04-08 14:12:54,2025-04-12 00:13:47.907,"[{'_id': '67e8b7f0385025c2de04b2af', 'avatarUrl': '/avatars/c8d36999b306354f5a7318413d180158.svg', 'fullname': 'Hank', 'name': 'Hurricane79', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/DawnC/553378321840890,2581,"{'language': 'en', 'probability': 0.8749151229858398}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/5fef4eb7770b06e11c2c6381/1NMdigjCGtn0yvQZSi5NJ.png,64.0,Alessandro Ercolani,giux78,396383572368245,"[{'type': 'text', 'value': 'LLAMA4 release highlights the importance of political and social bias. According to their own evaluation described in the release blog post:', 'raw': 'LLAMA4 release highlights the importance of political and social bias. According to their own evaluation described in the release blog post:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Refusals on contentious prompts dropped from 7% (#LLAMA 3.3) to under 2%', 'raw': '- Refusals on contentious prompts dropped from 7% (#LLAMA 3.3) to under 2%'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Unequal response refusals are now under 1%', 'raw': '- Unequal response refusals are now under 1%'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Political lean bias is said to be halved compared to #LLaMA 3.3 and comparable to Grok', 'raw': '- Political lean bias is said to be halved compared to #LLaMA 3.3 and comparable to Grok'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'However, we ', 'raw': 'However, we '}, {'type': 'mention', 'user': 'efederici', 'raw': '@efederici'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'mferraretto', 'raw': '@mferraretto'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'FinancialSupport', 'raw': '@FinancialSupport'}, {'type': 'text', 'value': ' and I released some weeks ago an independent open source benchmark called Propaganda to measure political bias in LLMs: ', 'raw': ' and I released some weeks ago an independent open source benchmark called Propaganda to measure political bias in LLMs: '}, {'type': 'link', 'href': 'https://github.com/mii-llm/propaganda', 'raw': 'https://github.com/mii-llm/propaganda'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the chart below, we evaluated multiple leading models on the basis of ratings across a range of prompts designed to expose ideological leanings. ', 'raw': 'In the chart below, we evaluated multiple leading models on the basis of ratings across a range of prompts designed to expose ideological leanings. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Despite Meta’s stated neutrality goals, LLAMA4 ranks at the very top in terms of total ratings aligned with a clear ideological bias. The models were tested on their ability to respond even-handedly to politically sensitive prompts. LLaMA 4 scored even higher than models known for strong alignment policies like GPT-4o.', 'raw': 'Despite Meta’s stated neutrality goals, LLAMA4 ranks at the very top in terms of total ratings aligned with a clear ideological bias. The models were tested on their ability to respond even-handedly to politically sensitive prompts. LLaMA 4 scored even higher than models known for strong alignment policies like GPT-4o.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLMs may be refusing less, but they still show bias through content framing. This suggests that refusal rates alone are not a sufficient measure of ideological bias. Relying solely on internal evaluations from AI labs also raises concerns about transparency and objectivity.', 'raw': 'LLMs may be refusing less, but they still show bias through content framing. This suggests that refusal rates alone are not a sufficient measure of ideological bias. Relying solely on internal evaluations from AI labs also raises concerns about transparency and objectivity.'}, {'type': 'new_line', 'raw': '\n'}]","LLAMA4 release highlights the importance of political and social bias. According to their own evaluation described in the release blog post: +- Refusals on contentious prompts dropped from 7% (#LLAMA 3.3) to under 2% +- Unequal response refusals are now under 1% +- Political lean bias is said to be halved compared to #LLaMA 3.3 and comparable to Grok + +However, we @efederici @mferraretto @FinancialSupport and I released some weeks ago an independent open source benchmark called Propaganda to measure political bias in LLMs: https://github.com/mii-llm/propaganda + +In the chart below, we evaluated multiple leading models on the basis of ratings across a range of prompts designed to expose ideological leanings. + +Despite Meta’s stated neutrality goals, LLAMA4 ranks at the very top in terms of total ratings aligned with a clear ideological bias. The models were tested on their ability to respond even-handedly to politically sensitive prompts. LLaMA 4 scored even higher than models known for strong alignment policies like GPT-4o. + +LLMs may be refusing less, but they still show bias through content framing. This suggests that refusal rates alone are not a sufficient measure of ideological bias. Relying solely on internal evaluations from AI labs also raises concerns about transparency and objectivity. 
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fef4eb7770b06e11c2c6381/ygyx1qZKeU_elCaZ3tClY.png'}]","[{'_id': '612246596d9ce900691744d2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/612246596d9ce900691744d2/9DlHVQDqblKz7QPTA6nDa.jpeg', 'fullname': 'Edoardo Federici', 'name': 'efederici', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 44}, {'_id': '648cca46d38113f34bf7cb72', 'avatarUrl': '/avatars/0f95fe632948f826a0585d1adf541f78.svg', 'fullname': 'Samuele Colombo', 'name': 'FinancialSupport', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23}, {'_id': '6586deb4bbb04840e35b5dbb', 'avatarUrl': '/avatars/24921b3a2600e145e6fc968164b25b9c.svg', 'fullname': 'Mattia Ferraretto', 'name': 'mferraretto', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9}]","[{'reaction': '👀', 'users': ['John6666', 'ajiriro', 'dantezxcd', 'FlipTip', 'giux78'], 'count': 5}]",2025-04-08 10:21:16,2025-04-08 10:21:16.948,[],/posts/giux78/396383572368245,2313,"{'language': 'en', 'probability': 0.9479315280914307}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,100648510273860,"[{'type': 'text', 'value': 'IndexTTS 📢 a TTS built on XTTS + Tortoise, released by BiliBili - a Chinese video sharing platform/community. ', 'raw': 'IndexTTS 📢 a TTS built on XTTS + Tortoise, released by BiliBili - a Chinese video sharing platform/community. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'IndexTeam/Index-TTS'}, 'url': 'https://huggingface.co/IndexTeam/Index-TTS', 'raw': 'https://huggingface.co/IndexTeam/Index-TTS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'IndexTeam/IndexTTS'}, 'url': 'https://huggingface.co/spaces/IndexTeam/IndexTTS', 'raw': 'https://huggingface.co/spaces/IndexTeam/IndexTTS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Chinese pronunciation correction via pinyin ', 'raw': '✨Chinese pronunciation correction via pinyin '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Pause control via punctuation ', 'raw': '✨Pause control via punctuation '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Improved speaker conditioning & audio quality (BigVGAN2) ', 'raw': '✨Improved speaker conditioning & audio quality (BigVGAN2) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Trained on 10k+ hours', 'raw': '✨Trained on 10k+ hours'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","IndexTTS 📢 a TTS built on XTTS + Tortoise, released by BiliBili - a Chinese video sharing platform/community. 
+Model: https://huggingface.co/IndexTeam/Index-TTS +Demo: https://huggingface.co/spaces/IndexTeam/IndexTTS + +✨Chinese pronunciation correction via pinyin +✨Pause control via punctuation +✨Improved speaker conditioning & audio quality (BigVGAN2) +✨Trained on 10k+ hours + + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/_x2gOVeFIuj8KIz9OMr51.png'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'erikloo', 'dantezxcd', 'svjack', 'cahlen', 'ixtiyoruz312'], 'count': 6}]",2025-04-08 09:05:42,2025-04-09 21:21:03.640,"[{'_id': '636f3c69b0ebc048881db378', 'avatarUrl': '/avatars/8aaab676f66023255d397ba82b4bcb6e.svg', 'fullname': 'James Hunter Carter', 'name': 'jameshuntercarter', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}]",/posts/AdinaY/100648510273860,2402,"{'language': 'en', 'probability': 0.7146434187889099}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,522991510920659,"[{'type': 'text', 'value': 'MAYE🎈a from-scratch RL framework for Vision Language Models, released by GAIR - an active research group from the Chinese community.', 'raw': 'MAYE🎈a from-scratch RL framework for Vision Language Models, released by GAIR - an active research group from the Chinese community.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Minimal & transparent pipeline with standard tools', 'raw': '✨Minimal & transparent pipeline with standard tools'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Standardized eval to track training & reflection', 'raw': '✨Standardized eval to track training & reflection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Open Code & Dataset ', 'raw': '✨Open Code & Dataset '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/GAIR-NLP/MAYE?tab=readme-ov-file', 'raw': 'https://github.com/GAIR-NLP/MAYE?tab=readme-ov-file'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'ManTle/MAYE'}, 'url': 'https://huggingface.co/datasets/ManTle/MAYE', 'raw': 'https://huggingface.co/datasets/ManTle/MAYE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2504.02587'}, 'url': 'https://huggingface.co/papers/2504.02587', 'raw': 'https://huggingface.co/papers/2504.02587', 'label': 'Rethinking RL Scaling for Vision Language Models: A Transparent,\n From-Scratch Framework and Comprehensive Evaluation Scheme (2504.02587)'}, {'type': 'new_line', 'raw': '\n'}]","MAYE🎈a from-scratch RL framework for Vision Language Models, released by GAIR - an active research group from the Chinese community. 
+ +✨Minimal & transparent pipeline with standard tools +✨Standardized eval to track training & reflection +✨Open Code & Dataset + +Code: +https://github.com/GAIR-NLP/MAYE?tab=readme-ov-file +Dataset: +https://huggingface.co/datasets/ManTle/MAYE +Paper: +https://huggingface.co/papers/2504.02587 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/9wtIMamJ-KOWiZhiUQkw-.png'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd', 'AtAndDev', 'martineden'], 'count': 4}]",2025-04-08 08:13:36,2025-04-09 18:42:37.890,"[{'_id': '61898e59c646d29e8ac6d214', 'avatarUrl': '/avatars/715ab84f0231708fbe1a4cde0ee1ca8a.svg', 'fullname': 'Eren Irmak', 'name': 'martineden', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10, 'isFollowing': False}]",/posts/AdinaY/522991510920659,1842,"{'language': 'en', 'probability': 0.7627291083335876}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/62ecdc18b72a69615d6bd857/qAHhWJbSsmoezFHiErBUT.png,434.0,Daniel Han-Chen,danielhanchen,859959880164586,"[{'type': 'text', 'value': 'You can now run Llama 4 on your own local device! 🦙', 'raw': 'You can now run Llama 4 on your own local device! 🦙'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Run our Dynamic 1.78-bit and 2.71-bit Llama 4 GGUFs: ', 'raw': 'Run our Dynamic 1.78-bit and 2.71-bit Llama 4 GGUFs: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF'}, 'url': 'https://huggingface.co/unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF', 'raw': 'https://huggingface.co/unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can run them on llama.cpp and other inference engines. See our guide here: ', 'raw': 'You can run them on llama.cpp and other inference engines. See our guide here: '}, {'type': 'link', 'href': 'https://docs.unsloth.ai/basics/tutorial-how-to-run-and-fine-tune-llama-4', 'raw': 'https://docs.unsloth.ai/basics/tutorial-how-to-run-and-fine-tune-llama-4'}]","You can now run Llama 4 on your own local device! 🦙 +Run our Dynamic 1.78-bit and 2.71-bit Llama 4 GGUFs: +https://huggingface.co/unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF + +You can run them on llama.cpp and other inference engines. 
See our guide here: https://docs.unsloth.ai/basics/tutorial-how-to-run-and-fine-tune-llama-4","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62ecdc18b72a69615d6bd857/vebLCIKgZZSNCZYWL25Wp.webp'}]",[],"[{'reaction': '🤗', 'users': ['shimmyshimmer', 'Kakaarot', 'JackCloudman', 'merterbak', 'John6666', 'Etherll', 'dantezxcd', 'Laikokwei', 'Doctor-Chad-PhD', 'shawon', 'victor', 'not-lain', 'sulfierry', 'TheSunIsRising'], 'count': 14}, {'reaction': '🔥', 'users': ['shimmyshimmer', 'Ruchir-T', 'prithivMLmods', 'codermert', 'John6666', 'Etherll', 'Laikokwei', 'Doctor-Chad-PhD', 'victor', 'not-lain'], 'count': 10}, {'reaction': '❤️', 'users': ['shimmyshimmer', 'eaddario', 'Etherll', 'Doctor-Chad-PhD', 'not-lain', 'Csplk'], 'count': 6}, {'reaction': '🚀', 'users': ['shimmyshimmer', 'manoumhd99', 'Etherll', 'Doctor-Chad-PhD', 'not-lain', 'onurozen'], 'count': 6}]",2025-04-08 06:04:57,2025-04-08 19:59:30.304,"[{'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/danielhanchen/859959880164586,5011,"{'language': 'en', 'probability': 0.5707833766937256}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,620280881336518,"[{'type': 'text', 'value': 'Llama models (arguably the most successful open AI models of all times) just represented 3% of total model downloads on Hugging Face in March.', 'raw': 'Llama models (arguably the most successful open AI models of all times) just represented 3% of total model downloads on Hugging Face in March.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'People and media like stories of winner takes all & one model/company to rule them all but the reality is much more nuanced than this!', 'raw': 'People and media like stories of winner takes all & one model/company to rule them all but the reality is much more nuanced than this!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kudos to all the small AI builders out there!', 'raw': 'Kudos to all the small AI builders out there!'}]","Llama models (arguably the most successful open AI models of all times) just represented 3% of total model downloads on Hugging Face in March. + +People and media like stories of winner takes all & one model/company to rule them all but the reality is much more nuanced than this! 
+ +Kudos to all the small AI builders out there!",[],[],"[{'reaction': '🔥', 'users': ['wath5', 'kaupane', 'John6666', 'jessepisel', 'onekq', 'johnlockejrr', 'FiditeNemini', 'salmankhanpm', 'Tom-Neverwinter', 'dantezxcd', 'nyuuzyou', 'AtAndDev'], 'count': 12}]",2025-04-03 19:09:36,2025-04-04 20:14:03.532,"[{'_id': '65644e982bdaccfcd536aff1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4VOzArmrRaX_DUTxGmm59.jpeg', 'fullname': 'Charles McSneed', 'name': 'ChuckMcSneed', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 59, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}]",/posts/clem/620280881336518,2016,"{'language': 'en', 'probability': 0.9595025181770325}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg,1593.0,Clelia Astra Bertelli,as-cle-bert,933495110003944,"[{'type': 'text', 'value': 'I heard someone saying 𝘃𝗼𝗶𝗰𝗲 assistants are the future, and someone else that 𝗠𝗖𝗣 will rule the AI world... So I decided to combine both!🚀', 'raw': 'I heard someone saying 𝘃𝗼𝗶𝗰𝗲 assistants are the future, and someone else that 𝗠𝗖𝗣 will rule the AI world... So I decided to combine both!🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Meet 𝐓𝐲𝐒𝐕𝐀 (𝗧𝘆pe𝗦cript 𝗩oice 𝗔ssistant, ', 'raw': 'Meet 𝐓𝐲𝐒𝐕𝐀 (𝗧𝘆pe𝗦cript 𝗩oice 𝗔ssistant, '}, {'type': 'link', 'href': 'https://github.com/AstraBert/TySVA', 'raw': 'https://github.com/AstraBert/TySVA'}, {'type': 'text', 'value': '), your (speaking) AI companion for everyday TypeScript programming tasks!🎙️', 'raw': '), your (speaking) AI companion for everyday TypeScript programming tasks!🎙️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'TySVA is a skilled TypeScript expert and, to provide accurate and up-to-date responses, she leverages the following workflow:', 'raw': 'TySVA is a skilled TypeScript expert and, to provide accurate and up-to-date responses, she leverages the following workflow:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🗣️ If you talk to her, she converts the audio into a textual prompt, and use it a starting point to answer your questions (if you send a message, she'll use directly that💬)"", 'raw': ""🗣️ If you talk to her, she converts the audio into a textual prompt, and use it a starting point to answer your questions (if you send a message, she'll use directly that💬)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 She can solve your questions by (deep)searching the web and/or by retrieving relevant information from a vector database containing TypeScript documentation. If the answer is simple, she can also reply directly (no tools needed!)', 'raw': '🧠 She can solve your questions by (deep)searching the web and/or by retrieving relevant information from a vector database containing TypeScript documentation. 
If the answer is simple, she can also reply directly (no tools needed!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\U0001f6dc To ease her life, TySVA has all the tools she needs available through Model Context Protocol (MCP)', 'raw': '\U0001f6dc To ease her life, TySVA has all the tools she needs available through Model Context Protocol (MCP)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🔊 Once she's done, she returns her answer to you, along with a voice summary of what she did and what solution she found"", 'raw': ""🔊 Once she's done, she returns her answer to you, along with a voice summary of what she did and what solution she found""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But how does she do that? What are her components?🤨', 'raw': 'But how does she do that? What are her components?🤨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 Qdrant + HuggingFace give her the documentation knowledge, providing the vector database and the embeddings', 'raw': '📖 Qdrant + HuggingFace give her the documentation knowledge, providing the vector database and the embeddings'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Linkup provides her with up-to-date, grounded answers, connecting her to the web', 'raw': '🌐 Linkup provides her with up-to-date, grounded answers, connecting her to the web'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦙 LlamaIndex makes up her brain, with the whole agentic architecture', 'raw': '🦙 LlamaIndex makes up her brain, with the whole agentic architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎤 ElevenLabs gives her ears and mouth, transcribing and producing voice inputs and outoputs', 'raw': '🎤 ElevenLabs gives her ears and mouth, transcribing and producing voice inputs and outoputs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📜 Groq provides her with speech, being the LLM provider behind TySVA', 'raw': '📜 Groq provides her with speech, being the LLM provider behind TySVA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Gradio+FastAPI make up her face and fibers, providing a seamless backend-to-frontend integration', 'raw': '🎨 Gradio+FastAPI make up her face and fibers, providing a seamless backend-to-frontend integration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you're now curious of trying her, you can easily do that by spinning her up locally (and with Docker!🐋) from the GitHub repo ➡️ "", 'raw': ""If you're now curious of trying her, you can easily do that by spinning her up locally (and with Docker!🐋) from the GitHub repo ➡️ ""}, {'type': 'link', 'href': 'https://github.com/AstraBert/TySVA', 'raw': 'https://github.com/AstraBert/TySVA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And feel free to leave any feedback!✨', 'raw': 'And feel free to leave any feedback!✨'}]","I heard someone saying 𝘃𝗼𝗶𝗰𝗲 assistants are the future, and someone else that 𝗠𝗖𝗣 will rule the AI world... 
So I decided to combine both!🚀 + +Meet 𝐓𝐲𝐒𝐕𝐀 (𝗧𝘆pe𝗦cript 𝗩oice 𝗔ssistant, https://github.com/AstraBert/TySVA), your (speaking) AI companion for everyday TypeScript programming tasks!🎙️ + +TySVA is a skilled TypeScript expert and, to provide accurate and up-to-date responses, she leverages the following workflow: +🗣️ If you talk to her, she converts the audio into a textual prompt, and use it a starting point to answer your questions (if you send a message, she'll use directly that💬) +🧠 She can solve your questions by (deep)searching the web and/or by retrieving relevant information from a vector database containing TypeScript documentation. If the answer is simple, she can also reply directly (no tools needed!) +🛜 To ease her life, TySVA has all the tools she needs available through Model Context Protocol (MCP) +🔊 Once she's done, she returns her answer to you, along with a voice summary of what she did and what solution she found + +But how does she do that? What are her components?🤨 + +📖 Qdrant + HuggingFace give her the documentation knowledge, providing the vector database and the embeddings +🌐 Linkup provides her with up-to-date, grounded answers, connecting her to the web +🦙 LlamaIndex makes up her brain, with the whole agentic architecture +🎤 ElevenLabs gives her ears and mouth, transcribing and producing voice inputs and outoputs +📜 Groq provides her with speech, being the LLM provider behind TySVA +🎨 Gradio+FastAPI make up her face and fibers, providing a seamless backend-to-frontend integration + +If you're now curious of trying her, you can easily do that by spinning her up locally (and with Docker!🐋) from the GitHub repo ➡️ https://github.com/AstraBert/TySVA + +And feel free to leave any feedback!✨","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65e330e7edc2f7306e252448/NQ0cxEOX587wFoky63-5M.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'dantezxcd'], 'count': 2}, {'reaction': '👍', 'users': ['JLouisBiz'], 'count': 1}]",2025-04-03 15:40:49,2025-04-03 15:40:49.314,[],/posts/as-cle-bert/933495110003944,751,"{'language': 'en', 'probability': 0.9288612604141235}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/66c8dc99951843ca6762fe02/yagkY9dY7_-qw8hAAPWiK.png,93.0,Rebekah Bogdanoff,DualityAI-RebekahBogdanoff,421214738933010,"[{'type': 'text', 'value': '🔥 Duality AI released some new datasets this week! 🔥 ', 'raw': '🔥 Duality AI released some new datasets this week! 
🔥 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Click here for the Multi Instance Object Detection dataset: ', 'raw': 'Click here for the Multi Instance Object Detection dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'duality-robotics/YOLOv8-Multi-Instance-Object-Detection-Dataset'}, 'url': 'https://huggingface.co/datasets/duality-robotics/YOLOv8-Multi-Instance-Object-Detection-Dataset', 'raw': 'https://huggingface.co/datasets/duality-robotics/YOLOv8-Multi-Instance-Object-Detection-Dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Click here for the Single Class Object Detection dataset: ', 'raw': 'Click here for the Single Class Object Detection dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'duality-robotics/YOLOv8-Object-Detection-02-Dataset'}, 'url': 'https://huggingface.co/datasets/duality-robotics/YOLOv8-Object-Detection-02-Dataset', 'raw': 'https://huggingface.co/datasets/duality-robotics/YOLOv8-Object-Detection-02-Dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can also get larger datasets here: ', 'raw': 'You can also get larger datasets here: '}, {'type': 'link', 'href': 'https://falcon.duality.ai/secure/documentation/ex4-dataset?sidebarMode=learn&utm_source=huggingface&utm_medium=dataset&utm_campaign=multiinstance', 'raw': 'https://falcon.duality.ai/secure/documentation/ex4-dataset?sidebarMode=learn&utm_source=huggingface&utm_medium=dataset&utm_campaign=multiinstance'}]","🔥 Duality AI released some new datasets this week! 🔥 + +Click here for the Multi Instance Object Detection dataset: https://huggingface.co/datasets/duality-robotics/YOLOv8-Multi-Instance-Object-Detection-Dataset + +Click here for the Single Class Object Detection dataset: https://huggingface.co/datasets/duality-robotics/YOLOv8-Object-Detection-02-Dataset + +You can also get larger datasets here: https://falcon.duality.ai/secure/documentation/ex4-dataset?sidebarMode=learn&utm_source=huggingface&utm_medium=dataset&utm_campaign=multiinstance",[],[],"[{'reaction': '👍', 'users': ['DualityAI-RebekahBogdanoff', 'John6666', 'dantezxcd'], 'count': 3}, {'reaction': '🔥', 'users': ['DualityAI-RebekahBogdanoff', 'dantezxcd'], 'count': 2}, {'reaction': '😎', 'users': ['DualityAI-RebekahBogdanoff', 'IamValeAI'], 'count': 2}, {'reaction': '🤝', 'users': ['DualityAI-RebekahBogdanoff', 'IamValeAI'], 'count': 2}, {'reaction': '🚀', 'users': ['DualityAI-RebekahBogdanoff'], 'count': 1}]",2025-04-03 15:32:39,2025-04-06 00:22:30.470,"[{'_id': '67ece7a330ed236a725d26af', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67ece7a330ed236a725d26af/hqZo8toONEcvMjN7GMuGL.png', 'fullname': 'Vale AI', 'name': 'IamValeAI', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/DualityAI-RebekahBogdanoff/421214738933010,1156,"{'language': 'en', 'probability': 0.6077587604522705}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg,29.0,Sk md saad amin,Reality123b,442874021588924,"[{'type': 'text', 'value': 'Does anyone know how to convert a replit app into a huggingface spaces app?', 'raw': 'Does anyone know how to convert a replit app into a huggingface spaces 
app?'}]",Does anyone know how to convert a replit app into a huggingface spaces app?,[],[],"[{'reaction': '🧠', 'users': ['John6666', 'dantezxcd', 'nyuuzyou', 'teleren'], 'count': 4}]",2025-04-03 14:27:39,2025-04-03 14:27:39.043,[],/posts/Reality123b/442874021588924,582,"{'language': 'en', 'probability': 0.9179877042770386}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6404403bad54665351d42ee2/TCC5Na8ojtSL1MJAzTn3b.png,38.0,zamal_,zamal,728543017322263,"[{'type': 'text', 'value': '🚀 DeepGit Lite is live! 🔍✨', 'raw': '🚀 DeepGit Lite is live! 🔍✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hey folks!', 'raw': 'Hey folks!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just launched DeepGit Lite — a lighter version of DeepGit with fewer components under the hood.', 'raw': 'Just launched DeepGit Lite — a lighter version of DeepGit with fewer components under the hood.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It won’t perform quite like the full powerhouse, but it’s great for a quick peek and first-hand feel! ⚙️👀', 'raw': 'It won’t perform quite like the full powerhouse, but it’s great for a quick peek and first-hand feel! ⚙️👀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Give it a spin and tell us what you think!', 'raw': 'Give it a spin and tell us what you think!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Try it here ', 'raw': '👉 Try it here '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'zamal/DeepGit-lite'}, 'url': 'https://huggingface.co/spaces/zamal/DeepGit-lite', 'raw': 'https://huggingface.co/spaces/zamal/DeepGit-lite'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' #opensource #DeepGit #gradio #githubresearch', 'raw': ' #opensource #DeepGit #gradio #githubresearch'}]","🚀 DeepGit Lite is live! 🔍✨ + +Hey folks! +Just launched DeepGit Lite — a lighter version of DeepGit with fewer components under the hood. +It won’t perform quite like the full powerhouse, but it’s great for a quick peek and first-hand feel! ⚙️👀 + +Give it a spin and tell us what you think! 
+👉 Try it here https://huggingface.co/spaces/zamal/DeepGit-lite + #opensource #DeepGit #gradio #githubresearch","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6404403bad54665351d42ee2/FGZ1-5ZZLoI3ThzFbC-IV.png'}]",[],"[{'reaction': '🚀', 'users': ['zamal', 'dantezxcd', 'John6666', 'LPDoctor'], 'count': 4}, {'reaction': '🔥', 'users': ['zamal', 'dantezxcd', 'LPDoctor'], 'count': 3}, {'reaction': '👀', 'users': ['zamal', 'dantezxcd', 'LPDoctor'], 'count': 3}]",2025-04-03 13:34:15,2025-04-28 18:39:47.912,"[{'_id': '644bfeb5cfebfbf8d8ff210c', 'avatarUrl': '/avatars/9c23d0a14d7d304086d14f28739d6cbb.svg', 'fullname': 'Naveen Raam Vivekanandhan', 'name': 'naveenraam28', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '64b308f5cf2da881122006b1', 'avatarUrl': '/avatars/6453409c70af40c7735ed229841ca257.svg', 'fullname': 'Alexey Elizarov', 'name': 'devall', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '6404403bad54665351d42ee2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6404403bad54665351d42ee2/TCC5Na8ojtSL1MJAzTn3b.png', 'fullname': 'zamal_', 'name': 'zamal', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 38, 'isFollowing': False}]",/posts/zamal/728543017322263,1985,"{'language': 'en', 'probability': 0.8382149338722229}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/qW3-oKDLFJpue2iS5VjT2.jpeg,134.0,Jason Corkill,jasoncorkill,394806250895359,"[{'type': 'text', 'value': '🚀 Rapidata: Setting the Standard for Model Evaluation', 'raw': '🚀 Rapidata: Setting the Standard for Model Evaluation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Rapidata is proud to announce our first independent appearance in academic research, featured in the Lumina-Image 2.0 paper. This marks the beginning of our journey to become the standard for testing text-to-image and generative models. Our expertise in large-scale human annotations allows researchers to refine their models with accurate, real-world feedback.', 'raw': 'Rapidata is proud to announce our first independent appearance in academic research, featured in the Lumina-Image 2.0 paper. This marks the beginning of our journey to become the standard for testing text-to-image and generative models. Our expertise in large-scale human annotations allows researchers to refine their models with accurate, real-world feedback.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As we continue to establish ourselves as a key player in model evaluation, we’re here to support researchers with high-quality annotations at scale. Reach out to info@rapidata.ai to see how we can help.', 'raw': 'As we continue to establish ourselves as a key player in model evaluation, we’re here to support researchers with high-quality annotations at scale. 
Reach out to info@rapidata.ai to see how we can help.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2503.21758'}, 'url': 'https://huggingface.co/papers/2503.21758', 'raw': 'https://huggingface.co/papers/2503.21758', 'label': 'Lumina-Image 2.0: A Unified and Efficient Image Generative Framework (2503.21758)'}]","🚀 Rapidata: Setting the Standard for Model Evaluation + +Rapidata is proud to announce our first independent appearance in academic research, featured in the Lumina-Image 2.0 paper. This marks the beginning of our journey to become the standard for testing text-to-image and generative models. Our expertise in large-scale human annotations allows researchers to refine their models with accurate, real-world feedback. + +As we continue to establish ourselves as a key player in model evaluation, we’re here to support researchers with high-quality annotations at scale. Reach out to info@rapidata.ai to see how we can help. + +https://huggingface.co/papers/2503.21758","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66f5624c42b853e73e0738eb/qX2GRaGzm-9xRzoAcI6kq.png'}]",[],"[{'reaction': '🔥', 'users': ['yohoji', 'maalber', 'jparavicini', 'Kchanger', 'jasoncorkill', 'LinoGiger', 'tmanuel', 'John6666', 'dantezxcd', 'Sneccello'], 'count': 10}, {'reaction': '🚀', 'users': ['yohoji', 'jparavicini', 'Kchanger', 'jasoncorkill', 'LinoGiger', 'maalber', 'lutao1993', 'dantezxcd'], 'count': 8}, {'reaction': '❤️', 'users': ['yohoji', 'maalber', 'jparavicini', 'Kchanger', 'jasoncorkill', 'dantezxcd'], 'count': 6}, {'reaction': '🧠', 'users': ['maalber', 'jparavicini', 'Kchanger', 'jasoncorkill', '1234aurel', 'dantezxcd'], 'count': 6}, {'reaction': '👀', 'users': ['jparavicini', 'Kchanger', 'jasoncorkill', 'dantezxcd'], 'count': 4}]",2025-04-03 12:07:48,2025-04-03 12:09:24.554,[],/posts/jasoncorkill/394806250895359,2384,"{'language': 'en', 'probability': 0.9154148101806641}",0 +/avatars/306f5de6ee725fc131857d39038c787b.svg,1.0,theaimoron,theaimoron,359923402366070,"[{'type': 'text', 'value': 'stupid question i know but is it possible for me to inform my LLM to build a chatbot that specialises in special need education and early intervention techniques. if so how do I do this please help. thanks so much in advance', 'raw': 'stupid question i know but is it possible for me to inform my LLM to build a chatbot that specialises in special need education and early intervention techniques. if so how do I do this please help. thanks so much in advance'}, {'type': 'new_line', 'raw': '\n'}]","stupid question i know but is it possible for me to inform my LLM to build a chatbot that specialises in special need education and early intervention techniques. if so how do I do this please help. 
thanks so much in advance +",[],[],"[{'reaction': '👀', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-04-03 11:35:20,2025-04-03 13:10:27.373,"[{'_id': '628ccf2530d48c565bae0af1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1653395218540-noauth.png', 'fullname': 'logan zoellner', 'name': 'nagolinc', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 11, 'isFollowing': False}, {'_id': '67e63349d2943e0b668f2cf3', 'avatarUrl': '/avatars/306f5de6ee725fc131857d39038c787b.svg', 'fullname': 'theaimoron', 'name': 'theaimoron', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/theaimoron/359923402366070,1473,"{'language': 'en', 'probability': 0.9416311383247375}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg,40.0,Jordan Legg,takarajordan,672093400959006,"[{'type': 'text', 'value': 'AI research over coffee ☕️', 'raw': 'AI research over coffee ☕️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'No abstracts, just bullet points.', 'raw': 'No abstracts, just bullet points.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Start your day here: ', 'raw': 'Start your day here: '}, {'type': 'link', 'href': 'https://tldr.takara.ai', 'raw': 'https://tldr.takara.ai'}]","AI research over coffee ☕️ +No abstracts, just bullet points. +Start your day here: https://tldr.takara.ai","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6613f7ae43c4456e13ecbdcc/acGUv9CC0L0slLE2QpzfV.png'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd', 'Yukkkop', 'vylance'], 'count': 4}]",2025-04-03 10:56:24,2025-04-06 02:08:34.634,"[{'_id': '668a873c0bf195d6e5c1145a', 'avatarUrl': '/avatars/81ed58c4996b33710ec8557eb676e546.svg', 'fullname': 'SajanGhimire', 'name': 'SajanGhi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/takarajordan/672093400959006,1594,"{'language': 'en', 'probability': 0.7724376320838928}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,698802607217100,"[{'type': 'text', 'value': 'I added OneSQL 3B to the model family, and its GGUF/AWQ/MLX quantizations. This model can fit into more places, and comfortably run on Apple M1 devices with twice the throughput (half the generation time) of its 7B sibling.', 'raw': 'I added OneSQL 3B to the model family, and its GGUF/AWQ/MLX quantizations. This model can fit into more places, and comfortably run on Apple M1 devices with twice the throughput (half the generation time) of its 7B sibling.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f'}, 'url': 'https://huggingface.co/collections/onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f', 'raw': 'https://huggingface.co/collections/onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f'}]","I added OneSQL 3B to the model family, and its GGUF/AWQ/MLX quantizations. This model can fit into more places, and comfortably run on Apple M1 devices with twice the throughput (half the generation time) of its 7B sibling. 
+ +https://huggingface.co/collections/onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f",[],[],"[{'reaction': '👍', 'users': ['John6666', 'schekin', 'dantezxcd', 'JLouisBiz'], 'count': 4}, {'reaction': '🔥', 'users': ['ahmeddoma'], 'count': 1}]",2025-04-03 07:08:22,2025-04-03 11:04:45.804,"[{'_id': '67b82d8129d4ee89a322e783', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4woCkyIo4JLEV-8HZgKMc.png', 'fullname': 'Quality Sistema Certifications and Inspections', 'name': 'Sistemacerts', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '67ee6afa0afb46ec98c9a011', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/lcHKhtc6fLRUr2tRHG9jz.png', 'fullname': 'ahmed doma', 'name': 'ahmeddoma', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/onekq/698802607217100,1985,"{'language': 'en', 'probability': 0.8972132205963135}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/H5ncB4vaBtP8GVCidgxL0.png,242.0,seawolf,seawolf2357,883323339740165,"[{'type': 'text', 'value': '🎨 Ghibli-Style Image Generation with Multilingual Text Integration: FLUX.1 Hugging Face Edition 🌏✨', 'raw': '🎨 Ghibli-Style Image Generation with Multilingual Text Integration: FLUX.1 Hugging Face Edition 🌏✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Hello creators! Today I'm introducing a special image generator that combines the beautiful aesthetics of Studio Ghibli with multilingual text integration! 😍"", 'raw': ""Hello creators! Today I'm introducing a special image generator that combines the beautiful aesthetics of Studio Ghibli with multilingual text integration! 😍""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'seawolf2357/Ghibli-Multilingual-Text-rendering'}, 'url': 'https://huggingface.co/spaces/seawolf2357/Ghibli-Multilingual-Text-rendering', 'raw': 'https://huggingface.co/spaces/seawolf2357/Ghibli-Multilingual-Text-rendering'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key Features', 'raw': '✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ghibli-Style Image Generation - High-quality animation-style images based on FLUX.1', 'raw': 'Ghibli-Style Image Generation - High-quality animation-style images based on FLUX.1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Multilingual Text Rendering - Support for Korean, Japanese, English, and all languages! 🇰🇷🇯🇵🇬🇧', 'raw': 'Multilingual Text Rendering - Support for Korean, Japanese, English, and all languages! 
🇰🇷🇯🇵🇬🇧'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Automatic Image Editing with Simple Prompts - Just input your desired text and you're done!"", 'raw': ""Automatic Image Editing with Simple Prompts - Just input your desired text and you're done!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Two Stylistic Variations Provided - Get two different results from a single prompt', 'raw': 'Two Stylistic Variations Provided - Get two different results from a single prompt'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full Hugging Face Spaces Support - Deploy and share instantly!', 'raw': 'Full Hugging Face Spaces Support - Deploy and share instantly!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 How Does It Work?', 'raw': '🚀 How Does It Work?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enter a prompt describing your desired image (e.g., ""a cat sitting by the window"")', 'raw': 'Enter a prompt describing your desired image (e.g., ""a cat sitting by the window"")'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Input the text you want to add (any language works!)', 'raw': 'Input the text you want to add (any language works!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Select the text position, size, and color', 'raw': 'Select the text position, size, and color'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Two different versions are automatically generated!', 'raw': 'Two different versions are automatically generated!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💯 Advantages of This Model', 'raw': '💯 Advantages of This Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'No Tedious Post-Editing Needed - Text is perfectly integrated during generation', 'raw': 'No Tedious Post-Editing Needed - Text is perfectly integrated during generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Natural Text Integration - Text automatically adjusts to match the image style', 'raw': 'Natural Text Integration - Text automatically adjusts to match the image style'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Perfect Multilingual Support - Any language renders beautifully!', 'raw': 'Perfect Multilingual Support - Any language renders beautifully!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'User-Friendly Interface - Easily adjust text size, position, and color', 'raw': 'User-Friendly Interface - Easily adjust text size, position, and color'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'One-Click Hugging Face Deployment - Use immediately without complex setup', 'raw': 'One-Click Hugging Face Deployment - Use immediately without complex setup'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎭 Use Cases', 'raw': '🎭 Use Cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Creating multilingual greeting cards', 'raw': 'Creating multilingual greeting cards'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Animation-style social media content', 'raw': 'Animation-style social media content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ghibli-inspired posters or banners', 'raw': 
'Ghibli-inspired posters or banners'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Character images with dialogue in various languages', 'raw': 'Character images with dialogue in various languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sharing with the community through Hugging Face Spaces', 'raw': 'Sharing with the community through Hugging Face Spaces'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This project leverages Hugging Face's FLUX.1 model to open new possibilities for seamlessly integrating high-quality Ghibli-style images with multilingual text using just prompts! 🌈"", 'raw': ""This project leverages Hugging Face's FLUX.1 model to open new possibilities for seamlessly integrating high-quality Ghibli-style images with multilingual text using just prompts! 🌈""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it now and create your own artistic masterpieces! 🎨✨', 'raw': 'Try it now and create your own artistic masterpieces! 🎨✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#GhibliStyle #MultilingualSupport #AIImageGeneration #TextRendering #FLUX #HuggingFace', 'raw': '#GhibliStyle #MultilingualSupport #AIImageGeneration #TextRendering #FLUX #HuggingFace'}]","🎨 Ghibli-Style Image Generation with Multilingual Text Integration: FLUX.1 Hugging Face Edition 🌏✨ + +Hello creators! Today I'm introducing a special image generator that combines the beautiful aesthetics of Studio Ghibli with multilingual text integration! 😍 + +https://huggingface.co/spaces/seawolf2357/Ghibli-Multilingual-Text-rendering + +✨ Key Features + +Ghibli-Style Image Generation - High-quality animation-style images based on FLUX.1 +Multilingual Text Rendering - Support for Korean, Japanese, English, and all languages! 🇰🇷🇯🇵🇬🇧 +Automatic Image Editing with Simple Prompts - Just input your desired text and you're done! +Two Stylistic Variations Provided - Get two different results from a single prompt +Full Hugging Face Spaces Support - Deploy and share instantly! + +🚀 How Does It Work? + +Enter a prompt describing your desired image (e.g., ""a cat sitting by the window"") +Input the text you want to add (any language works!) +Select the text position, size, and color +Two different versions are automatically generated! + +💯 Advantages of This Model + +No Tedious Post-Editing Needed - Text is perfectly integrated during generation +Natural Text Integration - Text automatically adjusts to match the image style +Perfect Multilingual Support - Any language renders beautifully! +User-Friendly Interface - Easily adjust text size, position, and color +One-Click Hugging Face Deployment - Use immediately without complex setup + +🎭 Use Cases + +Creating multilingual greeting cards +Animation-style social media content +Ghibli-inspired posters or banners +Character images with dialogue in various languages +Sharing with the community through Hugging Face Spaces + +This project leverages Hugging Face's FLUX.1 model to open new possibilities for seamlessly integrating high-quality Ghibli-style images with multilingual text using just prompts! 🌈 +Try it now and create your own artistic masterpieces! 
🎨✨ + +#GhibliStyle #MultilingualSupport #AIImageGeneration #TextRendering #FLUX #HuggingFace","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c3550d8cc87cf0c06838e7/mMu0dKD0irw5-ymFgCBup.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c3550d8cc87cf0c06838e7/kSIgY70B6yoIRYzNOQDfZ.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c3550d8cc87cf0c06838e7/1d7EgtA8PsT3sJRErMhaM.webp'}]",[],"[{'reaction': '🔥', 'users': ['seawolf2357', 'openfree', 'ginipick', 'cutechicken', 'fantos', 'aiqtech', 'aiqcamp', 'fantaxy', 'gunship999', 'kolaslab', 'immunobiotech', 'gopassso', 'solchanhwee', 'baekyuri', 'hugsfacelover', 'catjammer', 'songcreeset', 'coffeelover1995', 'lscsetepfa', 'ainagua', 'kokomu', 'stardust23', 'ncicawardsecter', 'idstech0', 'jacriessoul', 'HOLYBRESSYOU', 'josetedaic', 'varianstoss', 'cmlrours', 'freeterdance', 'doiduisqwe', 'John6666', 'linoyts', 'BryanBradfo', 'Godvers', 'spawn08', 'mdahsanulhimel', 'dantezxcd', 'DArkfirehuman', 'Berzelius255', 'andreavictory', 'maseriumpark', 'novermer6th', 'britny', 'soundofmindexict', 'sofiadrake', 'tictocvice', 'cosyinstreet', 'stepbysteb', 'chillypuppoy', 'dailycurrydel', 'ineverriver', 'cowardpte', 'calacuria2003', 'arabriabricks', 'aweb', 'saintmarcel', 'hugpu', 'powergen4ai', 'ommuni6661', 'deepseekrgo', 'milpl', 'astralq', 'kowougadian', 'michaelangel', 'algebradavid', 'mikankio', 'victoresmart', 'brabebird', 'pilaos', 'leee4', 'Laikokwei', 'Kevin7x'], 'count': 73}, {'reaction': '🚀', 'users': ['seawolf2357', 'ginipick', 'cutechicken', 'fantos', 'aiqcamp', 'fantaxy', 'gunship999', 'immunobiotech', 'baekyuri', 'hugsfacelover', 'catjammer', 'songcreeset', 'coffeelover1995', 'lscsetepfa', 'ainagua', 'kokomu', 'stardust23', 'ncicawardsecter', 'idstech0', 'jacriessoul', 'josetedaic', 'varianstoss', 'doiduisqwe', 'gopassso', 'BryanBradfo', 'ahmeddoma', 'kolaslab', 'andreavictory', 'maseriumpark', 'stepbysteb', 'novermer6th', 'openfree', 'powergen4ai', 'soundofmindexict', 'milpl', 'ommuni6661', 'kowougadian', 'sofiadrake', 'aiqtech', 'tictocvice', 'cosyinstreet', 'chillypuppoy', 'dailycurrydel', 'ineverriver', 'cowardpte', 'calacuria2003', 'arabriabricks', 'aweb', 'saintmarcel', 'victoresmart', 'brabebird', 'pilaos', 'Laikokwei'], 'count': 53}, {'reaction': '👀', 'users': ['seawolf2357', 'cutechicken', 'fantos', 'aiqcamp', 'immunobiotech', 'baekyuri', 'catjammer', 'songcreeset', 'ainagua', 'stardust23', 'ncicawardsecter', 'idstech0', 'HOLYBRESSYOU', 'doiduisqwe', 'BryanBradfo', 'kolaslab', 'andreavictory', 'novermer6th', 'openfree', 'powergen4ai', 'soundofmindexict', 'maseriumpark', 'gunship999', 'sofiadrake', 'milpl', 'ginipick', 'aiqtech', 'fantaxy', 'coffeelover1995', 'stepbysteb', 'brabebird', 'cowardpte'], 'count': 32}, {'reaction': '❤️', 'users': ['seawolf2357', 'cutechicken', 'aiqcamp', 'catjammer', 'songcreeset', 'ncicawardsecter', '1234aurel', 'kolaslab', 'doiduisqwe', 'powergen4ai', 'fantos', 'novermer6th', 'openfree', 'andreavictory', 'gunship999', 'sofiadrake', 'milpl', 'maseriumpark', 'immunobiotech', 'aiqtech', 'fantaxy', 'soundofmindexict', 'coffeelover1995', 'brabebird'], 'count': 24}, {'reaction': '🤗', 'users': ['seawolf2357', 'cutechicken', 'catjammer', '1234aurel', 'doiduisqwe', 'powergen4ai', 'songcreeset', 'kolaslab', 'openfree', 'gunship999', 'andreavictory', 'fantos', 'aiqcamp', 'sofiadrake', 'maseriumpark', 'brabebird'], 'count': 16}, {'reaction': '😎', 'users': ['seawolf2357', 'catjammer', 'cutechicken', 
'powergen4ai', 'doiduisqwe', 'gunship999', 'kolaslab', 'aiqcamp', 'maseriumpark', 'andreavictory', 'brabebird', 'openfree'], 'count': 12}, {'reaction': '👍', 'users': ['seawolf2357', 'catjammer', 'gopassso', 'solchanhwee', 'gunship999', 'cutechicken', 'maseriumpark', 'andreavictory', 'powergen4ai', 'brabebird', 'kolaslab', 'svjack'], 'count': 12}, {'reaction': '🧠', 'users': ['seawolf2357', 'catjammer', 'cutechicken', 'gunship999', 'kolaslab', 'aiqcamp', 'powergen4ai', 'maseriumpark', 'andreavictory', 'brabebird'], 'count': 10}, {'reaction': '➕', 'users': ['seawolf2357', 'catjammer', 'powergen4ai', 'cutechicken', 'gunship999', 'kolaslab', 'aiqcamp', 'maseriumpark', 'andreavictory', 'brabebird'], 'count': 10}, {'reaction': '🤯', 'users': ['seawolf2357', 'gopassso', 'powergen4ai', 'gunship999', 'maseriumpark', 'andreavictory', 'brabebird', 'cutechicken'], 'count': 8}, {'reaction': '😔', 'users': ['seawolf2357', 'gopassso', 'gunship999', 'maseriumpark', 'andreavictory', 'brabebird', 'powergen4ai', 'cutechicken'], 'count': 8}, {'reaction': '🤝', 'users': ['seawolf2357', 'gopassso', 'gunship999', 'cutechicken', 'maseriumpark', 'andreavictory', 'brabebird', 'powergen4ai'], 'count': 8}]",2025-04-03 06:05:27,2025-04-09 21:51:46.044,"[{'_id': '67b82d8129d4ee89a322e783', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4woCkyIo4JLEV-8HZgKMc.png', 'fullname': 'Quality Sistema Certifications and Inspections', 'name': 'Sistemacerts', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '673c531198b2d09abcfb8e3e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/673c531198b2d09abcfb8e3e/MsdIhD5WhBzuRsPghZ9py.jpeg', 'fullname': 'vasilcov', 'name': '1234aurel', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '66e7af8d13a1ada4fd38baa6', 'avatarUrl': '/avatars/3436ace0ebc7f2ea9c05261ba32a92ba.svg', 'fullname': 'ahamedaathal', 'name': 'ahamedaathal', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '67a375ea7d1cfad828f9b196', 'avatarUrl': '/avatars/24bc2488b3b6db1d44f0ed369b115f36.svg', 'fullname': 'Huan Le', 'name': 'huanle1515-hf', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/seawolf2357/883323339740165,8399,"{'language': 'en', 'probability': 0.7378907203674316}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,675785809270098,"[{'type': 'text', 'value': 'Adding AWQ version of OneSQL for vLLM folks.', 'raw': 'Adding AWQ version of OneSQL for vLLM folks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f'}, 'url': 'https://huggingface.co/collections/onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f', 'raw': 'https://huggingface.co/collections/onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f'}]","Adding AWQ version of OneSQL for vLLM folks. 
+ +https://huggingface.co/collections/onekq-ai/onesql-v01-qwen-67d8e3eb1611c5532bb90c5f",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'dantezxcd', 'Takugen'], 'count': 3}]",2025-03-31 04:02:10,2025-03-31 04:02:10.934,[],/posts/onekq/675785809270098,993,"{'language': 'en', 'probability': 0.47318461537361145}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,289592637571952,"[{'type': 'text', 'value': '✈️ FlightAware Photos Dataset - ', 'raw': '✈️ FlightAware Photos Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/flightaware'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/flightaware', 'raw': 'https://huggingface.co/datasets/nyuuzyou/flightaware'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection of approximately 197,718 aviation photographs featuring:', 'raw': 'Collection of approximately 197,718 aviation photographs featuring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- High-quality aircraft images across multiple sizes and formats', 'raw': '- High-quality aircraft images across multiple sizes and formats'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Comprehensive metadata including aircraft registrations, types, and photographer information', 'raw': '- Comprehensive metadata including aircraft registrations, types, and photographer information'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- View counts, ratings, and submission timestamps for each photo', 'raw': '- View counts, ratings, and submission timestamps for each photo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Rich classification data preserving original titles, descriptions, and photographer badges', 'raw': '- Rich classification data preserving original titles, descriptions, and photographer badges'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This dataset offers a unique visual archive of aircraft spanning commercial, military, and private aviation captured by FlightAware's community of photographers under CC BY-NC-SA 3.0 license."", 'raw': ""This dataset offers a unique visual archive of aircraft spanning commercial, military, and private aviation captured by FlightAware's community of photographers under CC BY-NC-SA 3.0 license.""}]","✈️ FlightAware Photos Dataset - https://huggingface.co/datasets/nyuuzyou/flightaware + +Collection of approximately 197,718 aviation photographs featuring: +- High-quality aircraft images across multiple sizes and formats +- Comprehensive metadata including aircraft registrations, types, and photographer information +- View counts, ratings, and submission timestamps for each photo +- Rich classification data preserving original titles, descriptions, and photographer badges + +This dataset offers a unique visual archive of aircraft spanning commercial, military, and private aviation captured by FlightAware's community of photographers under CC BY-NC-SA 3.0 license.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'Anas-Mohiuddin-Syed', 'clem', 'penouc', 'dantezxcd', 'on3dj', 'jessepisel'], 'count': 7}, {'reaction': '❤️', 'users': ['clem', 'dantezxcd', 'Takugen'], 'count': 3}]",2025-03-30 23:57:09,2025-03-30 23:57:09.933,[],/posts/nyuuzyou/289592637571952,1608,"{'language': 'en', 'probability': 
0.8565258979797363}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg,107.0,Sami Halawa,samihalawa,303811519371828,"[{'type': 'text', 'value': '🚀 DEEPSEEK R1… Replicated! 🧠✨', 'raw': '🚀 DEEPSEEK R1… Replicated! 🧠✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All powered by just ONE system prompt.', 'raw': 'All powered by just ONE system prompt.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it. Compare it. See for yourself. 👀', 'raw': 'Try it. Compare it. See for yourself. 👀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥 Even better than the original — with richer, more insightful replies.', 'raw': '🔥 Even better than the original — with richer, more insightful replies.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 No gimmicks. Just pure AI performance.', 'raw': '🎯 No gimmicks. Just pure AI performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[](', 'raw': '[]('}, {'type': 'link', 'href': 'https://chatgpt.com/g/g-67e5e1e379e88191873752b60f518a14-deepseek-r1-thinking', 'raw': 'https://chatgpt.com/g/g-67e5e1e379e88191873752b60f518a14-deepseek-r1-thinking'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PROMPT IN THE COMMENTS ', 'raw': 'PROMPT IN THE COMMENTS '}]","🚀 DEEPSEEK R1… Replicated! 🧠✨ +All powered by just ONE system prompt. +Try it. Compare it. See for yourself. 👀 +🔥 Even better than the original — with richer, more insightful replies. +🎯 No gimmicks. Just pure AI performance. + +[](https://chatgpt.com/g/g-67e5e1e379e88191873752b60f518a14-deepseek-r1-thinking) +PROMPT IN THE COMMENTS ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d00458fff501149572827f/nTciaGOgbDpRo28kqLlWt.png'}]",[],"[{'reaction': '👍', 'users': ['JLouisBiz', 'John6666', 'VXGRAPHICAL', 'tobit-trick', 'Anas-Mohiuddin-Syed', 'dantezxcd', 'on3dj', 'dipmakecryptofun', 'Chunte'], 'count': 9}, {'reaction': '🔥', 'users': ['dipmakecryptofun', 'Sirchuks', 'Chunte', 'Takugen'], 'count': 4}]",2025-03-30 22:01:41,2025-04-16 11:34:12.786,"[{'_id': '67ea67cacdf8c58da8717819', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aRTGk9nlafSv3SjON1H_k.jpeg', 'fullname': 'koman', 'name': 'hillaire', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '67b6ed09e6cf3cb3b58caeb0', 'avatarUrl': '/avatars/73f3fc194eea8f5f00f5bec5de2c75e3.svg', 'fullname': 'Marcus Cedric R. 
Idia', 'name': 'marcuscedricridia', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7, 'isFollowing': False}, {'_id': '6469054fff18750165a78ca0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/kN3gxxRDnuyBlo3JZcHAA.png', 'fullname': 'Angelino Santiago', 'name': 'MrDevolver', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 18, 'isFollowing': False}, {'_id': '661ee8bcbf6142a6e303e346', 'avatarUrl': '/avatars/cd358ceab6fb5427ee18cdb48c2c3ccc.svg', 'fullname': 'Pedro Naves', 'name': 'pedropnaves', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65d00458fff501149572827f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg', 'fullname': 'Sami Halawa', 'name': 'samihalawa', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': False}]",/posts/samihalawa/303811519371828,1889,"{'language': 'en', 'probability': 0.8427507281303406}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/66febbf898f30194f8b73451/b8NrAI3tlWXYw6VMOVHc7.jpeg,19.0,Massimo Roberto Scamarcia,mrs83,223725055811929,"[{'type': 'text', 'value': 'To developers: Build opt-in systems. ', 'raw': 'To developers: Build opt-in systems. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To policymakers: Legislate data transparency.', 'raw': 'To policymakers: Legislate data transparency.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To artists: Unionize.', 'raw': 'To artists: Unionize.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To users: Demand ethical tools.', 'raw': 'To users: Demand ethical tools.'}]","To developers: Build opt-in systems. +To policymakers: Legislate data transparency. +To artists: Unionize. 
+To users: Demand ethical tools.",[],[],"[{'reaction': '👍', 'users': ['JLouisBiz', 'Anas-Mohiuddin-Syed', 'dantezxcd', 'on3dj', 'Takugen'], 'count': 5}, {'reaction': '❤️', 'users': ['elreykusumo', 'Anas-Mohiuddin-Syed', 'dantezxcd'], 'count': 3}]",2025-03-30 18:32:25,2025-04-02 17:25:32.066,"[{'_id': '66febbf898f30194f8b73451', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66febbf898f30194f8b73451/b8NrAI3tlWXYw6VMOVHc7.jpeg', 'fullname': 'Massimo Roberto Scamarcia', 'name': 'mrs83', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 19, 'isFollowing': False}]",/posts/mrs83/223725055811929,1763,"{'language': 'en', 'probability': 0.6412793397903442}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857746553-5df7e9e5da6d0311fd3d53f9.jpeg,1173.0,Thomas Wolf,thomwolf,431321353076398,"[{'type': 'text', 'value': 'The new DeepSite space is really insane for vibe-coders', 'raw': 'The new DeepSite space is really insane for vibe-coders'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'enzostvs/deepsite'}, 'url': 'https://huggingface.co/spaces/enzostvs/deepsite', 'raw': 'https://huggingface.co/spaces/enzostvs/deepsite'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With the wave of vibe-coding-optimized LLMs like the latest open-source DeepSeek model (version V3-0324), you can basically prompt out-of-the-box and create any app and game in one-shot.', 'raw': 'With the wave of vibe-coding-optimized LLMs like the latest open-source DeepSeek model (version V3-0324), you can basically prompt out-of-the-box and create any app and game in one-shot.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It feels so powerful to me, no more complex framework or under-the-hood prompt engineering to have a working text-to-app tool.', 'raw': 'It feels so powerful to me, no more complex framework or under-the-hood prompt engineering to have a working text-to-app tool.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI is eating the world and *open-source* AI is eating AI itself!', 'raw': 'AI is eating the world and *open-source* AI is eating AI itself!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PS: and even more meta is that the DeepSite app and DeepSeek model are both fully open-source code => time to start recursively improve?', 'raw': 'PS: and even more meta is that the DeepSite app and DeepSeek model are both fully open-source code => time to start recursively improve?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""PPS: you still need some inference hosting unless you're running the 600B param model at home, so check the very nice list of HF Inference Providers for this model: "", 'raw': ""PPS: you still need some inference hosting unless you're running the 600B param model at home, so check the very nice list of HF Inference Providers for this model: ""}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'deepseek-ai/DeepSeek-V3-0324'}, 'url': 'https://huggingface.co/deepseek-ai/DeepSeek-V3-0324', 'raw': 'https://huggingface.co/deepseek-ai/DeepSeek-V3-0324'}, {'type': 'new_line', 'raw': '\n'}]","The new DeepSite space is really insane for 
vibe-coders +https://huggingface.co/spaces/enzostvs/deepsite + +With the wave of vibe-coding-optimized LLMs like the latest open-source DeepSeek model (version V3-0324), you can basically prompt out-of-the-box and create any app and game in one-shot. + +It feels so powerful to me, no more complex framework or under-the-hood prompt engineering to have a working text-to-app tool. + +AI is eating the world and *open-source* AI is eating AI itself! + +PS: and even more meta is that the DeepSite app and DeepSeek model are both fully open-source code => time to start recursively improve? + +PPS: you still need some inference hosting unless you're running the 600B param model at home, so check the very nice list of HF Inference Providers for this model: https://huggingface.co/deepseek-ai/DeepSeek-V3-0324 +",[],[],"[{'reaction': '❤️', 'users': ['elreykusumo', 'Anas-Mohiuddin-Syed', 'ggbetz', 'daniel-ltw', 'Primo35', 'Cloudy-Boom', 'John6666', 'orrzohar', 'anhaf', 'Nymbo', 'arcticfox23', 'dantezxcd', 'Takugen'], 'count': 13}, {'reaction': '😔', 'users': ['elreykusumo', 'Anas-Mohiuddin-Syed', 'dantezxcd'], 'count': 3}, {'reaction': '👍', 'users': ['ggbetz', 'dantezxcd', 'on3dj'], 'count': 3}]",2025-03-30 15:33:30,2025-03-31 01:00:17.302,"[{'_id': '675be558e16de4a95ab725c9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/sSJj7rzO-0BdUrZfYnZ8B.png', 'fullname': 'Jason Xu', 'name': 'Cloudy-Boom', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/thomwolf/431321353076398,3565,"{'language': 'en', 'probability': 0.8890993595123291}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,541972801764076,"[{'type': 'text', 'value': 'Want to vibecode with DeepSeek? Just spent 10 minutes with this space and created a full world indicators dashboard - literally just by describing what I wanted!', 'raw': 'Want to vibecode with DeepSeek? Just spent 10 minutes with this space and created a full world indicators dashboard - literally just by describing what I wanted!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Anyone can now prototype and deploy projects instantly.', 'raw': 'Anyone can now prototype and deploy projects instantly.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try out the app: ', 'raw': 'Try out the app: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'enzostvs/deepsite'}, 'url': 'https://huggingface.co/spaces/enzostvs/deepsite', 'raw': 'https://huggingface.co/spaces/enzostvs/deepsite'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My dashboard: ', 'raw': 'My dashboard: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'fdaudens/world-indicators'}, 'url': 'https://huggingface.co/spaces/fdaudens/world-indicators', 'raw': 'https://huggingface.co/spaces/fdaudens/world-indicators'}]","Want to vibecode with DeepSeek? Just spent 10 minutes with this space and created a full world indicators dashboard - literally just by describing what I wanted! + +Anyone can now prototype and deploy projects instantly. 
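If you would rather script that deployment step than click through the UI, here is a minimal sketch using huggingface_hub's duplicate_space to copy the DeepSite Space into your own account; making the copy private is just an illustrative choice, and it assumes you are already logged in with an HF token:

```
# Sketch: duplicate the DeepSite Space into your own namespace.
from huggingface_hub import duplicate_space

repo = duplicate_space(from_id="enzostvs/deepsite", private=True)
print(repo.url)  # URL of your personal copy of the Space
```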
+ +Try out the app: https://huggingface.co/spaces/enzostvs/deepsite + +My dashboard: https://huggingface.co/spaces/fdaudens/world-indicators",[],[],"[{'reaction': '👍', 'users': ['JLouisBiz', 'ryg81', 'dantezxcd', 'on3dj', 'Takugen'], 'count': 5}, {'reaction': '👀', 'users': ['John6666', 'lunarflu', 'Anas-Mohiuddin-Syed', 'dantezxcd'], 'count': 4}, {'reaction': '🔥', 'users': ['Nicholuas', 'dantezxcd'], 'count': 2}]",2025-03-30 14:00:18,2025-03-30 14:00:18.310,[],/posts/fdaudens/541972801764076,2136,"{'language': 'en', 'probability': 0.86961430311203}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1643148576788-6196cf5c7bf349604352ecda.png,47.0,Smoliakov,Yehor,460907086720491,"[{'type': 'text', 'value': 'Are you interesting in different runtimes for AI models?', 'raw': 'Are you interesting in different runtimes for AI models?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out IREE (iree.dev), it convert models to MLIR and then execute on different platforms.', 'raw': 'Check out IREE (iree.dev), it convert models to MLIR and then execute on different platforms.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have tested it in Rust on CPU and CUDA: ', 'raw': 'I have tested it in Rust on CPU and CUDA: '}, {'type': 'link', 'href': 'https://github.com/egorsmkv/eerie-yolo11', 'raw': 'https://github.com/egorsmkv/eerie-yolo11'}, {'type': 'new_line', 'raw': '\n'}]","Are you interesting in different runtimes for AI models? + +Check out IREE (iree.dev), it convert models to MLIR and then execute on different platforms. + +I have tested it in Rust on CPU and CUDA: https://github.com/egorsmkv/eerie-yolo11 +",[],[],"[{'reaction': '😎', 'users': ['Paltiness', 'John6666', 'Fishtiks', 'marcuscedricridia', 'Anas-Mohiuddin-Syed', 'Ditot', '9voltfan2009', 'dantezxcd'], 'count': 8}, {'reaction': '👍', 'users': ['JLouisBiz', '9voltfan2009', 'dantezxcd', 'on3dj', 'Takugen'], 'count': 5}]",2025-03-30 13:06:42,2025-03-30 13:06:42.541,[],/posts/Yehor/460907086720491,2052,"{'language': 'en', 'probability': 0.8500561118125916}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,204937868725881,"[{'type': 'text', 'value': 'I collected the recitations of the holy quran from 20 different reciters and uploaded the full dataset here:', 'raw': 'I collected the recitations of the holy quran from 20 different reciters and uploaded the full dataset here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'MohamedRashad/Quran-Recitations'}, 'url': 'https://huggingface.co/datasets/MohamedRashad/Quran-Recitations', 'raw': 'https://huggingface.co/datasets/MohamedRashad/Quran-Recitations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out 🥷', 'raw': 'Check it out 🥷'}]","I collected the recitations of the holy quran from 20 different reciters and uploaded the full dataset here: +https://huggingface.co/datasets/MohamedRashad/Quran-Recitations + +Check it out 🥷",[],[],"[{'reaction': '❤️', 'users': ['Paltiness', 'Etherll', 'johnlockejrr', 'Anas-Mohiuddin-Syed', 'dantezxcd', 'eslamx7', 'MohammedHamdy32', 'Abuznaid99', 'Seif4d'], 'count': 9}, {'reaction': '👀', 'users': ['John6666', 'Etherll', 'Anas-Mohiuddin-Syed', 'dantezxcd', 'Takugen'], 
'count': 5}, {'reaction': '🤝', 'users': ['hassenhamdi', 'dantezxcd', 'on3dj', 'RISER-LM', 'Takugen'], 'count': 5}, {'reaction': '🔥', 'users': ['eslamx7'], 'count': 1}]",2025-03-30 12:57:24,2025-04-30 08:56:40.310,"[{'_id': '67abdff7bb44ec714c7adb09', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/w6Dt0RZSMBNt1WgLnOsp2.png', 'fullname': 'Seif x DebiaN', 'name': 'Seif4d', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/MohamedRashad/204937868725881,2693,"{'language': 'en', 'probability': 0.8223310708999634}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1650745211725-noauth.png,55.0,Mohammed Hamdy,mmhamdy,915745518136335,"[{'type': 'text', 'value': 'What inspired the Transformer architecture in the ""Attention Is All You Need"" paper? And how were various ideas combined to create this groundbreaking model?', 'raw': 'What inspired the Transformer architecture in the ""Attention Is All You Need"" paper? And how were various ideas combined to create this groundbreaking model?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In this lengthy article, I explore the story and the origins of some of the ideas introduced in the paper. We'll explore everything from the fundamental attention mechanism that lies at its heart to the surprisingly simple explanation for its name, Transformer."", 'raw': ""In this lengthy article, I explore the story and the origins of some of the ideas introduced in the paper. We'll explore everything from the fundamental attention mechanism that lies at its heart to the surprisingly simple explanation for its name, Transformer.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 Examples of ideas explored in the article:', 'raw': '💡 Examples of ideas explored in the article:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ What was the inspiration for the attention mechanism?', 'raw': '✅ What was the inspiration for the attention mechanism?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ How did we go from attention to self-attention?', 'raw': '✅ How did we go from attention to self-attention?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Did the team have any other names in mind for the model?', 'raw': '✅ Did the team have any other names in mind for the model?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and more...', 'raw': 'and more...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I aim to tell the story of Transformers as I would have wanted to read it, and hopefully, one that appeals to others interested in the details of this fascinating idea. This narrative draws from video interviews, lectures, articles, tweets/Xs, and some digging into the literature. I have done my best to be accurate, but errors are possible. If you find inaccuracies or have any additions, please do reach out, and I will gladly make the necessary updates.', 'raw': 'I aim to tell the story of Transformers as I would have wanted to read it, and hopefully, one that appeals to others interested in the details of this fascinating idea. This narrative draws from video interviews, lectures, articles, tweets/Xs, and some digging into the literature. 
I have done my best to be accurate, but errors are possible. If you find inaccuracies or have any additions, please do reach out, and I will gladly make the necessary updates.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the article: ', 'raw': 'Read the article: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/mmhamdy/pandemonium-the-transformers-story', 'raw': 'https://huggingface.co/blog/mmhamdy/pandemonium-the-transformers-story'}]","What inspired the Transformer architecture in the ""Attention Is All You Need"" paper? And how were various ideas combined to create this groundbreaking model? + +In this lengthy article, I explore the story and the origins of some of the ideas introduced in the paper. We'll explore everything from the fundamental attention mechanism that lies at its heart to the surprisingly simple explanation for its name, Transformer. + +💡 Examples of ideas explored in the article: + +✅ What was the inspiration for the attention mechanism? +✅ How did we go from attention to self-attention? +✅ Did the team have any other names in mind for the model? + +and more... + +I aim to tell the story of Transformers as I would have wanted to read it, and hopefully, one that appeals to others interested in the details of this fascinating idea. This narrative draws from video interviews, lectures, articles, tweets/Xs, and some digging into the literature. I have done my best to be accurate, but errors are possible. If you find inaccuracies or have any additions, please do reach out, and I will gladly make the necessary updates. + +Read the article: https://huggingface.co/blog/mmhamdy/pandemonium-the-transformers-story","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62645f88c39850dc093d6105/cP2MJhRM8OeyAv19Kl0TV.png'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'Anas-Mohiuddin-Syed', 'Kaba', 'mam1680', 'dantezxcd', 'on3dj', 'Takugen'], 'count': 7}]",2025-03-30 11:25:24,2025-03-30 11:48:38.985,[],/posts/mmhamdy/915745518136335,1648,"{'language': 'en', 'probability': 0.9401974081993103}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,147467454723238,"[{'type': 'text', 'value': '9 Multimodal Chain-of-Thought methods', 'raw': '9 Multimodal Chain-of-Thought methods'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""How Chain-of-Thought (CoT) prompting can unlock models' full potential across images, video, audio and more? Finding special multimodal CoT techniques is the answer."", 'raw': ""How Chain-of-Thought (CoT) prompting can unlock models' full potential across images, video, audio and more? Finding special multimodal CoT techniques is the answer.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here are 9 methods of Multimodal Chain-of-Thought (MCoT). Most of them are open-source:', 'raw': 'Here are 9 methods of Multimodal Chain-of-Thought (MCoT). Most of them are open-source:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. KAM-CoT -> ', 'raw': '1. 
KAM-CoT -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.12863'}, 'url': 'https://huggingface.co/papers/2401.12863', 'raw': 'https://huggingface.co/papers/2401.12863', 'label': 'KAM-CoT: Knowledge Augmented Multimodal Chain-of-Thoughts Reasoning (2401.12863)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This lightweight framework combines CoT prompting with knowledge graphs (KGs) and achieves 93.87% accuracy', 'raw': 'This lightweight framework combines CoT prompting with knowledge graphs (KGs) and achieves 93.87% accuracy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Multimodal Visualization-of-Thought (MVoT) -> ', 'raw': '2. Multimodal Visualization-of-Thought (MVoT) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2501.07542'}, 'url': 'https://huggingface.co/papers/2501.07542', 'raw': 'https://huggingface.co/papers/2501.07542', 'label': 'Imagine while Reasoning in Space: Multimodal Visualization-of-Thought (2501.07542)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lets models generate visual reasoning traces, using a token discrepancy loss to improve visual quality', 'raw': 'Lets models generate visual reasoning traces, using a token discrepancy loss to improve visual quality'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Compositional CoT (CCoT) -> ', 'raw': '3. Compositional CoT (CCoT) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2311.17076'}, 'url': 'https://huggingface.co/papers/2311.17076', 'raw': 'https://huggingface.co/papers/2311.17076', 'label': 'Compositional Chain-of-Thought Prompting for Large Multimodal Models (2311.17076)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Uses scene graph (SG) representations generated by the LMM itself to improve performance on compositional and general multimodal benchmarks', 'raw': 'Uses scene graph (SG) representations generated by the LMM itself to improve performance on compositional and general multimodal benchmarks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. URSA -> ', 'raw': '4. URSA -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2501.04686'}, 'url': 'https://huggingface.co/papers/2501.04686', 'raw': 'https://huggingface.co/papers/2501.04686', 'label': 'URSA: Understanding and Verifying Chain-of-thought Reasoning in\n Multimodal Mathematics (2501.04686)'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Brings System 2-style thinking to multimodal math reasoning, using a 3-module CoT data synthesis process with CoT distillation, trajectory-format rewriting and format unification', 'raw': 'Brings System 2-style thinking to multimodal math reasoning, using a 3-module CoT data synthesis process with CoT distillation, trajectory-format rewriting and format unification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. MM-Verify -> ', 'raw': '5. 
MM-Verify -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.13383'}, 'url': 'https://huggingface.co/papers/2502.13383', 'raw': 'https://huggingface.co/papers/2502.13383', 'label': 'MM-Verify: Enhancing Multimodal Reasoning with Chain-of-Thought\n Verification (2502.13383)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Introduces a verification mechanism with MM-Verifier and MM-Reasoner that implements synthesized high-quality CoT data for multimodal reasoning', 'raw': 'Introduces a verification mechanism with MM-Verifier and MM-Reasoner that implements synthesized high-quality CoT data for multimodal reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6. Duty-Distinct CoT (DDCoT) -> ', 'raw': '6. Duty-Distinct CoT (DDCoT) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2310.16436'}, 'url': 'https://huggingface.co/papers/2310.16436', 'raw': 'https://huggingface.co/papers/2310.16436', 'label': 'DDCoT: Duty-Distinct Chain-of-Thought Prompting for Multimodal Reasoning\n in Language Models (2310.16436)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Divides the reasoning responsibilities between LMs and visual models, integrating the visual recognition capabilities into the joint reasoning process', 'raw': 'Divides the reasoning responsibilities between LMs and visual models, integrating the visual recognition capabilities into the joint reasoning process'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7. Multimodal-CoT from Amazon Web Services -> ', 'raw': '7. Multimodal-CoT from Amazon Web Services -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2302.00923'}, 'url': 'https://huggingface.co/papers/2302.00923', 'raw': 'https://huggingface.co/papers/2302.00923', 'label': 'Multimodal Chain-of-Thought Reasoning in Language Models (2302.00923)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A two-stage framework separates rationale generation from answer prediction, allowing the model to reason more effectively using multimodal inputs', 'raw': 'A two-stage framework separates rationale generation from answer prediction, allowing the model to reason more effectively using multimodal inputs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '8. Graph-of-Thought (GoT) -> ', 'raw': '8. 
Graph-of-Thought (GoT) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2305.16582'}, 'url': 'https://huggingface.co/papers/2305.16582', 'raw': 'https://huggingface.co/papers/2305.16582', 'label': 'Beyond Chain-of-Thought, Effective Graph-of-Thought Reasoning in Large\n Language Models (2305.16582)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This two-stage framework models reasoning as a graph of interconnected ideas, improving performance on text-only and multimodal tasks', 'raw': 'This two-stage framework models reasoning as a graph of interconnected ideas, improving performance on text-only and multimodal tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More in the comments👇', 'raw': 'More in the comments👇'}]","9 Multimodal Chain-of-Thought methods + +How Chain-of-Thought (CoT) prompting can unlock models' full potential across images, video, audio and more? Finding special multimodal CoT techniques is the answer. + +Here are 9 methods of Multimodal Chain-of-Thought (MCoT). Most of them are open-source: + +1. KAM-CoT -> https://huggingface.co/papers/2401.12863 +This lightweight framework combines CoT prompting with knowledge graphs (KGs) and achieves 93.87% accuracy + +2. Multimodal Visualization-of-Thought (MVoT) -> https://huggingface.co/papers/2501.07542 +Lets models generate visual reasoning traces, using a token discrepancy loss to improve visual quality + +3. Compositional CoT (CCoT) -> https://huggingface.co/papers/2311.17076 +Uses scene graph (SG) representations generated by the LMM itself to improve performance on compositional and general multimodal benchmarks + +4. URSA -> https://huggingface.co/papers/2501.04686 +Brings System 2-style thinking to multimodal math reasoning, using a 3-module CoT data synthesis process with CoT distillation, trajectory-format rewriting and format unification + +5. MM-Verify -> https://huggingface.co/papers/2502.13383 +Introduces a verification mechanism with MM-Verifier and MM-Reasoner that implements synthesized high-quality CoT data for multimodal reasoning + +6. Duty-Distinct CoT (DDCoT) -> https://huggingface.co/papers/2310.16436 +Divides the reasoning responsibilities between LMs and visual models, integrating the visual recognition capabilities into the joint reasoning process + +7. Multimodal-CoT from Amazon Web Services -> https://huggingface.co/papers/2302.00923 +A two-stage framework separates rationale generation from answer prediction, allowing the model to reason more effectively using multimodal inputs + +8. 
Graph-of-Thought (GoT) -> https://huggingface.co/papers/2305.16582 +This two-stage framework models reasoning as a graph of interconnected ideas, improving performance on text-only and multimodal tasks + +More in the comments👇",[],[],"[{'reaction': '👀', 'users': ['John6666', 'suryakiran786', 'nosuyo', 'dantezxcd', 'on3dj', 'Takugen', 'alexa4cg', 'sourishwicon'], 'count': 8}]",2025-03-30 11:13:54,2025-03-30 11:14:16.462,"[{'_id': '64838b28c235ef76b63e4999', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg', 'fullname': 'Ksenia Se', 'name': 'Kseniase', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 971, 'isFollowing': False}]",/posts/Kseniase/147467454723238,2075,"{'language': 'en', 'probability': 0.7824171185493469}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64862a25cf5ad5e1f0482ef2/61qPUtw9jIl7zpPYmi0VW.jpeg,50.0,David Smooke,Smooke,382988814222009,"[{'type': 'text', 'value': 'Meet the Bot That Reads All the Bad News Headlines So You Don’t Have To ', 'raw': 'Meet the Bot That Reads All the Bad News Headlines So You Don’t Have To '}, {'type': 'link', 'href': 'https://hackernoon.com/meet-the-bot-that-reads-all-the-bad-news-headlines-so-you-dont-have-to', 'raw': 'https://hackernoon.com/meet-the-bot-that-reads-all-the-bad-news-headlines-so-you-dont-have-to'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And More Top ', 'raw': 'And More Top '}, {'type': 'link', 'href': 'https://hackernoon.com/', 'raw': 'https://hackernoon.com/'}, {'type': 'text', 'value': ' blogs today! ', 'raw': ' blogs today! '}, {'type': 'link', 'href': 'https://hackernoon.com/p/publish', 'raw': 'https://hackernoon.com/p/publish'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Truth About Senior Engineering at FAANG—It’s Not What You Expect ', 'raw': 'The Truth About Senior Engineering at FAANG—It’s Not What You Expect '}, {'type': 'link', 'href': 'https://hackernoon.com/the-truth-about-senior-engineering-at-faangits-not-what-you-expect', 'raw': 'https://hackernoon.com/the-truth-about-senior-engineering-at-faangits-not-what-you-expect'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ex-ICE Agent Urges Congress to Double Down on Immigration Surveillance Tech ', 'raw': 'Ex-ICE Agent Urges Congress to Double Down on Immigration Surveillance Tech '}, {'type': 'link', 'href': 'https://hackernoon.com/ex-ice-agent-urges-congress-to-double-down-on-immigration-surveillance-tech', 'raw': 'https://hackernoon.com/ex-ice-agent-urges-congress-to-double-down-on-immigration-surveillance-tech'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Too Many AIs With Too Many Terrible Names: How to Choose Your AI Model ', 'raw': 'Too Many AIs With Too Many Terrible Names: How to Choose Your AI Model '}, {'type': 'link', 'href': 'https://hackernoon.com/too-many-ais-with-too-many-terrible-names-how-to-choose-your-ai-model', 'raw': 'https://hackernoon.com/too-many-ais-with-too-many-terrible-names-how-to-choose-your-ai-model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Nobody Wants to Pay for Apps Anymore—Except When AI Is Involved ', 'raw': 'Nobody Wants to Pay for Apps Anymore—Except When AI 
Is Involved '}, {'type': 'link', 'href': 'https://hackernoon.com/nobody-wants-to-pay-for-apps-anymoreexcept-when-ai-is-involved', 'raw': 'https://hackernoon.com/nobody-wants-to-pay-for-apps-anymoreexcept-when-ai-is-involved'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Nvidia GTC 2025: AI Goes Big, Robots Get Smarter, and GPUs Rule the World ', 'raw': 'Nvidia GTC 2025: AI Goes Big, Robots Get Smarter, and GPUs Rule the World '}, {'type': 'link', 'href': 'https://hackernoon.com/nvidia-gtc-2025-ai-goes-big-robots-get-smarter-and-gpus-rule-the-world', 'raw': 'https://hackernoon.com/nvidia-gtc-2025-ai-goes-big-robots-get-smarter-and-gpus-rule-the-world'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'C++ Metaprogramming: Compilation of Calculations, from Basic Techniques to Advanced Methods ', 'raw': 'C++ Metaprogramming: Compilation of Calculations, from Basic Techniques to Advanced Methods '}, {'type': 'link', 'href': 'https://hackernoon.com/c-metaprogramming-compilation-of-calculations-from-basic-techniques-to-advanced-methods', 'raw': 'https://hackernoon.com/c-metaprogramming-compilation-of-calculations-from-basic-techniques-to-advanced-methods'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Has Google Made a $32 Billion Cloud Security Blunder? ', 'raw': 'Has Google Made a $32 Billion Cloud Security Blunder? '}, {'type': 'link', 'href': 'https://hackernoon.com/has-google-made-a-$32-billion-cloud-security-blunder', 'raw': 'https://hackernoon.com/has-google-made-a-$32-billion-cloud-security-blunder'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ripple in Time: Is XRP About to Go Parabolic in 2025? ', 'raw': 'Ripple in Time: Is XRP About to Go Parabolic in 2025? '}, {'type': 'link', 'href': 'https://hackernoon.com/ripple-in-time-is-xrp-about-to-go-parabolic-in-2025', 'raw': 'https://hackernoon.com/ripple-in-time-is-xrp-about-to-go-parabolic-in-2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Your Next Tech Job? Vibe Coding ', 'raw': 'Your Next Tech Job? 
Vibe Coding '}, {'type': 'link', 'href': 'https://hackernoon.com/your-next-tech-job-vibe-coding', 'raw': 'https://hackernoon.com/your-next-tech-job-vibe-coding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ethereum Block Building: The Hidden Economy Behind Every Transaction ', 'raw': 'Ethereum Block Building: The Hidden Economy Behind Every Transaction '}, {'type': 'link', 'href': 'https://hackernoon.com/ethereum-block-building-the-hidden-economy-behind-every-transaction', 'raw': 'https://hackernoon.com/ethereum-block-building-the-hidden-economy-behind-every-transaction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Building a Robust JS/TS Monorepo: Best Practices with Yarn, NX and Changesets ', 'raw': 'Building a Robust JS/TS Monorepo: Best Practices with Yarn, NX and Changesets '}, {'type': 'link', 'href': 'https://hackernoon.com/building-a-robust-jsts-monorepo-best-practices-with-yarn-nx-and-changesets', 'raw': 'https://hackernoon.com/building-a-robust-jsts-monorepo-best-practices-with-yarn-nx-and-changesets'}]","Meet the Bot That Reads All the Bad News Headlines So You Don’t Have To https://hackernoon.com/meet-the-bot-that-reads-all-the-bad-news-headlines-so-you-dont-have-to + +And More Top https://hackernoon.com/ blogs today! https://hackernoon.com/p/publish + +The Truth About Senior Engineering at FAANG—It’s Not What You Expect https://hackernoon.com/the-truth-about-senior-engineering-at-faangits-not-what-you-expect + +Ex-ICE Agent Urges Congress to Double Down on Immigration Surveillance Tech https://hackernoon.com/ex-ice-agent-urges-congress-to-double-down-on-immigration-surveillance-tech + +Too Many AIs With Too Many Terrible Names: How to Choose Your AI Model https://hackernoon.com/too-many-ais-with-too-many-terrible-names-how-to-choose-your-ai-model + +Nobody Wants to Pay for Apps Anymore—Except When AI Is Involved https://hackernoon.com/nobody-wants-to-pay-for-apps-anymoreexcept-when-ai-is-involved + +Nvidia GTC 2025: AI Goes Big, Robots Get Smarter, and GPUs Rule the World https://hackernoon.com/nvidia-gtc-2025-ai-goes-big-robots-get-smarter-and-gpus-rule-the-world + +C++ Metaprogramming: Compilation of Calculations, from Basic Techniques to Advanced Methods https://hackernoon.com/c-metaprogramming-compilation-of-calculations-from-basic-techniques-to-advanced-methods + +Has Google Made a $32 Billion Cloud Security Blunder? https://hackernoon.com/has-google-made-a-$32-billion-cloud-security-blunder + +Ripple in Time: Is XRP About to Go Parabolic in 2025? https://hackernoon.com/ripple-in-time-is-xrp-about-to-go-parabolic-in-2025 + +Your Next Tech Job? 
Vibe Coding https://hackernoon.com/your-next-tech-job-vibe-coding + +Ethereum Block Building: The Hidden Economy Behind Every Transaction https://hackernoon.com/ethereum-block-building-the-hidden-economy-behind-every-transaction + +Building a Robust JS/TS Monorepo: Best Practices with Yarn, NX and Changesets https://hackernoon.com/building-a-robust-jsts-monorepo-best-practices-with-yarn-nx-and-changesets","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64862a25cf5ad5e1f0482ef2/7_XqTHOEXWPI1KNEMfUTO.png'}]",[],"[{'reaction': '❤️', 'users': ['modenn', 'xanzzzz', 'kuma21', 'xinnn63', 'dantezxcd'], 'count': 5}, {'reaction': '👀', 'users': ['John6666', 'Sibetzy', 'robb-0', 'dantezxcd'], 'count': 4}, {'reaction': '🤝', 'users': ['dubiousx', 'Takugen', 'dantezxcd'], 'count': 3}]",2025-03-26 14:37:27,2025-03-26 14:37:27.389,[],/posts/Smooke/382988814222009,1189,"{'language': 'en', 'probability': 0.751255452632904}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6271a90e7b9f120adb3adff1/-WeBn2fA9Z1KHuXfHeBJp.png,11.0,Kshitiz Khanal,kshitizkhanal7,793397895832478,"[{'type': 'text', 'value': 'Wrote a new article on: Building Collaborative AI: How to Train LLM and VLM Agents to Work Together ', 'raw': 'Wrote a new article on: Building Collaborative AI: How to Train LLM and VLM Agents to Work Together '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/kshitizkhanal7/train-agents-together', 'raw': 'https://huggingface.co/blog/kshitizkhanal7/train-agents-together'}]","Wrote a new article on: Building Collaborative AI: How to Train LLM and VLM Agents to Work Together +https://huggingface.co/blog/kshitizkhanal7/train-agents-together",[],[],"[{'reaction': '❤️', 'users': ['modenn', 'xanzzzz', 'kuma21', 'xinnn63', 'Takugen', 'dantezxcd'], 'count': 6}, {'reaction': '👍', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-03-26 14:27:17,2025-03-26 14:27:17.019,[],/posts/kshitizkhanal7/793397895832478,1090,"{'language': 'en', 'probability': 0.7133020162582397}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6051e59531c5be7f3dd5ebc9/iW1huuI60224DPBzn2cki.jpeg,141.0,Giada Pistilli,giadap,214321702054817,"[{'type': 'text', 'value': 'We\'ve all become experts at clicking ""I agree"" without a second thought. In my latest blog post, I explore why these traditional consent models are increasingly problematic in the age of generative AI. ', 'raw': 'We\'ve all become experts at clicking ""I agree"" without a second thought. In my latest blog post, I explore why these traditional consent models are increasingly problematic in the age of generative AI. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I found three fundamental challenges:', 'raw': 'I found three fundamental challenges:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Scope problem: how can you know what you're agreeing to when AI could use your data in different ways?"", 'raw': ""- Scope problem: how can you know what you're agreeing to when AI could use your data in different ways?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Temporality problem: once an AI system learns from your data, good luck trying to make it ""unlearn"" it.', 'raw': '- Temporality problem: once an AI system learns from your data, good luck trying to make it ""unlearn"" it.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Autonomy trap: the data you share today could create systems that pigeonhole you tomorrow.', 'raw': '- Autonomy trap: the data you share today could create systems that pigeonhole you tomorrow.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Individual users shouldn\'t bear all the responsibility, while big tech holds all the cards. We need better approaches to level the playing field, from collective advocacy and stronger technological safeguards to establishing ""data fiduciaries"" with a legal duty to protect our digital interests.', 'raw': 'Individual users shouldn\'t bear all the responsibility, while big tech holds all the cards. We need better approaches to level the playing field, from collective advocacy and stronger technological safeguards to establishing ""data fiduciaries"" with a legal duty to protect our digital interests.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Available here: ', 'raw': 'Available here: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/giadap/beyond-consent', 'raw': 'https://huggingface.co/blog/giadap/beyond-consent'}]","We've all become experts at clicking ""I agree"" without a second thought. In my latest blog post, I explore why these traditional consent models are increasingly problematic in the age of generative AI. + +I found three fundamental challenges: +- Scope problem: how can you know what you're agreeing to when AI could use your data in different ways? +- Temporality problem: once an AI system learns from your data, good luck trying to make it ""unlearn"" it. +- Autonomy trap: the data you share today could create systems that pigeonhole you tomorrow. + +Individual users shouldn't bear all the responsibility, while big tech holds all the cards. We need better approaches to level the playing field, from collective advocacy and stronger technological safeguards to establishing ""data fiduciaries"" with a legal duty to protect our digital interests. 
+ +Available here: https://huggingface.co/blog/giadap/beyond-consent",[],[],"[{'reaction': '🔥', 'users': ['brunatrevelin', 'jsulz', 'John6666', 'yjernite', 'AdinaY', 'ZennyKenny', 'wsuff', 'syariffadilla', 'Nacho93', 'xinnn63', 'Mahdiyyah4', 'Takugen', 'dantezxcd'], 'count': 13}, {'reaction': '❤️', 'users': ['modenn', 'xanzzzz', 'kuma21', 'xinnn63', 'robb-0', 'saba1234', 'yjernite', 'dantezxcd'], 'count': 8}]",2025-03-26 14:10:56,2025-03-26 14:10:56.843,[],/posts/giadap/214321702054817,2360,"{'language': 'en', 'probability': 0.9327138662338257}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,318268301919621,"[{'type': 'text', 'value': '📚 Archive of Our Own (AO3) Dataset - ', 'raw': '📚 Archive of Our Own (AO3) Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/archiveofourown'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/archiveofourown', 'raw': 'https://huggingface.co/datasets/nyuuzyou/archiveofourown'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection of approximately 12.6 million fanfiction works (from 63.2M processed IDs) featuring:', 'raw': 'Collection of approximately 12.6 million fanfiction works (from 63.2M processed IDs) featuring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Full text content from diverse fandoms across television, film, books, anime, and more', 'raw': '- Full text content from diverse fandoms across television, film, books, anime, and more'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Comprehensive metadata including warnings, relationships, characters, and tags', 'raw': '- Comprehensive metadata including warnings, relationships, characters, and tags'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multilingual content with works in 40+ languages though English predominant', 'raw': '- Multilingual content with works in 40+ languages though English predominant'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Rich classification data preserving author-created folksonomy and content categorization', 'raw': '- Rich classification data preserving author-created folksonomy and content categorization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""P.S. This is the most expensive dataset I've created so far! And also, thank you all for the 100 followers on Hugging Face!"", 'raw': ""P.S. This is the most expensive dataset I've created so far! And also, thank you all for the 100 followers on Hugging Face!""}]","📚 Archive of Our Own (AO3) Dataset - https://huggingface.co/datasets/nyuuzyou/archiveofourown + +Collection of approximately 12.6 million fanfiction works (from 63.2M processed IDs) featuring: +- Full text content from diverse fandoms across television, film, books, anime, and more +- Comprehensive metadata including warnings, relationships, characters, and tags +- Multilingual content with works in 40+ languages though English predominant +- Rich classification data preserving author-created folksonomy and content categorization + +P.S. This is the most expensive dataset I've created so far! 
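A minimal sketch for taking a first look at the dataset by streaming it with 🤗 datasets; the "train" split name is an assumption, so inspect the keys rather than relying on any particular field names:

```
# Stream one record instead of downloading ~12.6M works up front.
from datasets import load_dataset

ds = load_dataset("nyuuzyou/archiveofourown", split="train", streaming=True)  # split name assumed
first = next(iter(ds))
print(sorted(first.keys()))  # check the actual metadata fields before relying on them
```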
And also, thank you all for the 100 followers on Hugging Face!",[],[],"[{'reaction': '❤️', 'users': ['modenn', 'xanzzzz', 'kuma21', 'gwehhkulbetzz', 'xinnn63', 'jkeisling', 'ZeroWw', 'starsinwinter', 'Ba2han', 'dantezxcd', 'franc1s', 'sivarajan', 'BigBruhBruh', 'SapSapphireFR', 'SpacehogSutra'], 'count': 15}, {'reaction': '👍', 'users': ['John6666', 'Kayplanet', 'xinnn63', 'starsinwinter', 'Takugen', 'dantezxcd', 'franc1s', 'sivarajan', 'BigBruhBruh'], 'count': 9}, {'reaction': '👀', 'users': ['robb-0', 'dantezxcd', 'franc1s', 'BigBruhBruh', 'SpacehogSutra'], 'count': 5}, {'reaction': '😔', 'users': ['SpaceTruck', 'Ihateao3copywriteinfringment', 'jesibell'], 'count': 3}, {'reaction': '😎', 'users': ['SpacehogSutra'], 'count': 1}, {'reaction': '🤝', 'users': ['SpacehogSutra'], 'count': 1}, {'reaction': '🚀', 'users': ['SpacehogSutra'], 'count': 1}]",2025-03-26 14:07:28,2025-04-30 15:20:16.461,"[{'_id': '643ac5d2e2b979ae6144d68c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png', 'fullname': 'nyuuzyou', 'name': 'nyuuzyou', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 244, 'isFollowing': False}, {'_id': '680ae80f2c4b584e1d73c264', 'avatarUrl': '/avatars/38cea1d5c4f8d9cef99cfee941d4dff5.svg', 'fullname': 'bobby', 'name': 'BOBOBOBOBBOBOOB', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '68098bb400323efc438fb7bf', 'avatarUrl': '/avatars/0755e3a54c372db62afeda8127e0af4d.svg', 'fullname': 'Not happy with you', 'name': 'DevilRollAO3', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '680b65dca9c2772d241dc040', 'avatarUrl': '/avatars/c0994b798eaaca2772db034321b47841.svg', 'fullname': 'creativity ishuman', 'name': 'creativityishuman', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '680e0ef985293771bc626fc3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/40NxROJJhZJoLSyXu9HC4.jpeg', 'fullname': 'Stop It', 'name': 'Ihateao3copywriteinfringment', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '680a72706eec58b0d9ef41f1', 'avatarUrl': '/avatars/21d5ba646e983703ef147d771420dfd0.svg', 'fullname': 'reyusty', 'name': 'reyusty', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '680de939a32b0afc5fcdc80a', 'avatarUrl': '/avatars/8aab72fc46cbb5f7661c8454d2a2ba2c.svg', 'fullname': 'Nein Nope', 'name': 'SpaceTruck', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '680f78d2b9973151cce58b95', 'avatarUrl': '/avatars/3f244c543b02813f932b6e55b3c60973.svg', 'fullname': 'AISucks', 'name': 'DefendArtistsandCreatives', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '680fb3b6aabbfc79d06c5036', 'avatarUrl': '/avatars/e85de6a28fcf0fb1f0191f95723542f4.svg', 'fullname': 'die die die', 'name': 'leaveao3alone', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6303c56ffc783bfc7441ad0a', 'avatarUrl': '/avatars/3c05c56577b941c286f58841e63ec28b.svg', 'fullname': 'francis ephe', 'name': 'franc1s', 'type': 'user', 'isPro': False, 'isHf': False, 
'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6810099e8f64955417f06809', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/5WK8NWNC13ok_bte9Waz3.png', 'fullname': 'Eala Chopair-Gorm', 'name': 'ealachopairgorm', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '68117059e49f9c8d6b6c6f04', 'avatarUrl': '/avatars/f9a35af851304e4f09c87808260fc559.svg', 'fullname': 'j d', 'name': 'jesibell', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6612ca83d73af9e941b93889', 'avatarUrl': '/avatars/9ab1f501bc68260e44bb54c97cc30583.svg', 'fullname': 'Henry Higginbottom', 'name': 'SpacehogSutra', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/nyuuzyou/318268301919621,2740,"{'language': 'en', 'probability': 0.8207980990409851}",44 +/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg,46.0,Emin Temiz,etemiz,999738107053598,"[{'type': 'text', 'value': 'Latest DeepSeek V3 0324 did better than previous version in many domains such as health, nutrition, fasting, bitcoin.', 'raw': 'Latest DeepSeek V3 0324 did better than previous version in many domains such as health, nutrition, fasting, bitcoin.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Who wants to see some example change of answers between the two models?', 'raw': 'Who wants to see some example change of answers between the two models?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://sheet.zoho.com/sheet/open/mz41j09cc640a29ba47729fed784a263c1d08', 'raw': 'https://sheet.zoho.com/sheet/open/mz41j09cc640a29ba47729fed784a263c1d08'}]","Latest DeepSeek V3 0324 did better than previous version in many domains such as health, nutrition, fasting, bitcoin. + +Who wants to see some example change of answers between the two models? 
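A rough sketch of how such a side-by-side probe could be scripted with huggingface_hub's InferenceClient; the example question is made up, and it assumes both checkpoints are reachable through an inference provider (the full comparison sheet is linked below):

```
# Ask both DeepSeek V3 checkpoints the same question and compare answers.
from huggingface_hub import InferenceClient

client = InferenceClient()  # assumes an HF token is configured
question = "Is intermittent fasting healthy?"  # placeholder probe

for model_id in ("deepseek-ai/DeepSeek-V3", "deepseek-ai/DeepSeek-V3-0324"):
    out = client.chat_completion(
        model=model_id,
        messages=[{"role": "user", "content": question}],
        max_tokens=200,
    )
    print(f"--- {model_id} ---\n{out.choices[0].message.content}\n")
```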
+ +https://sheet.zoho.com/sheet/open/mz41j09cc640a29ba47729fed784a263c1d08","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a488b5224f96d8cc3754fc/WpZV-uNxIemTqO_LdM1MO.png'}]",[],"[{'reaction': '❤️', 'users': ['Makar7', 'PujiPramusanto', 'modenn', 'xanzzzz', 'kuma21', 'xinnn63', 'Moibe', 'Takugen', 'dantezxcd'], 'count': 9}, {'reaction': '👀', 'users': ['John6666', 'Makar7', 'xinnn63', 'robb-0', 'dantezxcd'], 'count': 5}]",2025-03-26 04:22:12,2025-03-28 18:04:24.196,"[{'_id': '6613f7ae43c4456e13ecbdcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg', 'fullname': 'Jordan Legg', 'name': 'takarajordan', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 40, 'isFollowing': False}, {'_id': '65a488b5224f96d8cc3754fc', 'avatarUrl': '/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg', 'fullname': 'Emin Temiz', 'name': 'etemiz', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 46, 'isFollowing': False}]",/posts/etemiz/999738107053598,1951,"{'language': 'en', 'probability': 0.9205775260925293}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg,3938.0,chansung park,chansung,621792589205071,"[{'type': 'text', 'value': 'simple guide on the recipe for GRPO on Open-R1 which is built on top of TRL ', 'raw': 'simple guide on the recipe for GRPO on Open-R1 which is built on top of TRL '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I think FastAPI wrapper of vLLM with WeightSyncWorker is pretty cool feature. Also, we have many predefined reward functions out of the box!', 'raw': 'I think FastAPI wrapper of vLLM with WeightSyncWorker is pretty cool feature. Also, we have many predefined reward functions out of the box!'}]","simple guide on the recipe for GRPO on Open-R1 which is built on top of TRL + +I think FastAPI wrapper of vLLM with WeightSyncWorker is pretty cool feature. 
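For a concrete feel of the recipe, a minimal GRPO sketch with TRL's GRPOTrainer; the toy length-based reward, the small model, and the dataset are illustrative assumptions rather than the actual Open-R1 configuration (the toy reward simply stands in for the predefined ones mentioned next):

```
# Minimal GRPO run with TRL (illustrative; not the exact Open-R1 recipe).
from datasets import load_dataset
from trl import GRPOConfig, GRPOTrainer

dataset = load_dataset("trl-lib/tldr", split="train")

# Toy reward: prefer completions near 50 characters.
def reward_len(completions, **kwargs):
    return [-abs(50 - len(completion)) for completion in completions]

args = GRPOConfig(output_dir="grpo-demo", use_vllm=True)  # use_vllm routes generation through the vLLM server path
trainer = GRPOTrainer(
    model="Qwen/Qwen2-0.5B-Instruct",
    reward_funcs=reward_len,
    args=args,
    train_dataset=dataset,
)
trainer.train()
```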
Also, we have many predefined reward functions out of the box!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60d3b57ad7b174177faabd6e/OJMgth4ZAxYCfYoPAX9sR.jpeg'}]",[],"[{'reaction': '❤️', 'users': ['chansung', 'MUSTYGRAM', 'smirki', 'analytics206', 'John6666', 'dal4933', 'lunarflu', 'Makar7', 'dudukuyz', 'modenn', 'xanzzzz', 'kuma21', 'xinnn63', 'Mahdiyyah4', 'Takugen', 'dantezxcd'], 'count': 16}]",2025-03-25 22:33:03,2025-03-27 01:00:38.393,"[{'_id': '6613f7ae43c4456e13ecbdcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg', 'fullname': 'Jordan Legg', 'name': 'takarajordan', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 40, 'isFollowing': False}, {'_id': '60d3b57ad7b174177faabd6e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg', 'fullname': 'chansung park', 'name': 'chansung', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3938, 'isFollowing': False}, {'_id': '64d1129297ca59bcf7458d07', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64d1129297ca59bcf7458d07/54J83Jv0Mk2CEX73Bx1_g.jpeg', 'fullname': 'Manav Majumdar', 'name': 'smirki', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 81, 'isFollowing': False}, {'_id': '67e49ebaa13c43984be452bf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67e49ebaa13c43984be452bf/wLJoiE1HL_iPNjghbdgjC.jpeg', 'fullname': 'Natalie H', 'name': 'xinnn63', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/chansung/621792589205071,3873,"{'language': 'en', 'probability': 0.9330673813819885}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/60cd486d723acf5eb46fe8d3/Z1bD1kjvZ0QAOjZna41Xr.jpeg,62.0,Waseem AlShikh,wassemgtk,397348237653076,"[{'type': 'text', 'value': 'For fun, a new project: SuperTokenizer! A BPE tokenizer trained on C4 to beat GPT-4. Byte-level, A100-powered, and open-source. Messing around with tokens! ', 'raw': 'For fun, a new project: SuperTokenizer! A BPE tokenizer trained on C4 to beat GPT-4. Byte-level, A100-powered, and open-source. Messing around with tokens! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/wassemgtk/SuperTokenizer', 'raw': 'https://github.com/wassemgtk/SuperTokenizer'}]","For fun, a new project: SuperTokenizer! A BPE tokenizer trained on C4 to beat GPT-4. Byte-level, A100-powered, and open-source. Messing around with tokens! 
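For a rough idea of what byte-level BPE training looks like with the 🤗 tokenizers library; the corpus file, vocab size, and special token below are placeholder assumptions, and the repo linked next has the real setup:

```
# Byte-level BPE training sketch (placeholders, not SuperTokenizer's config).
from tokenizers import Tokenizer, decoders, models, pre_tokenizers, trainers

tokenizer = Tokenizer(models.BPE())
tokenizer.pre_tokenizer = pre_tokenizers.ByteLevel(add_prefix_space=False)
tokenizer.decoder = decoders.ByteLevel()

trainer = trainers.BpeTrainer(
    vocab_size=50_000,                                    # placeholder size
    special_tokens=["<|endoftext|>"],                     # placeholder token
    initial_alphabet=pre_tokenizers.ByteLevel.alphabet(),
)
tokenizer.train(["c4_sample.txt"], trainer)  # assumed local C4 text shard
print(tokenizer.encode("Messing around with tokens!").tokens)
```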
+https://github.com/wassemgtk/SuperTokenizer",[],[],"[{'reaction': '❤️', 'users': ['stefan-it', 'Kayplanet', 'PujiPramusanto', 'modenn', 'xanzzzz', 'kuma21', 'xinnn63', 'dantezxcd'], 'count': 8}, {'reaction': '👍', 'users': ['JLouisBiz', 'John6666', 'bcci', 'xinnn63', 'Takugen', 'dantezxcd'], 'count': 6}, {'reaction': '🤯', 'users': ['robb-0', 'dantezxcd'], 'count': 2}, {'reaction': '👀', 'users': ['dantezxcd'], 'count': 1}]",2025-03-25 18:17:51,2025-03-26 09:38:00.291,"[{'_id': '6613f7ae43c4456e13ecbdcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg', 'fullname': 'Jordan Legg', 'name': 'takarajordan', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 40, 'isFollowing': False}]",/posts/wassemgtk/397348237653076,2109,"{'language': 'en', 'probability': 0.8091275691986084}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1654278567459-626a9bfa03e2e2796f24ca11.jpeg,295.0,Freddy Boulton,freddyaboulton,609815746401982,"[{'type': 'text', 'value': 'Ever wanted to share your AI creations with friends? ✨', 'raw': 'Ever wanted to share your AI creations with friends? ✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Screenshots are fine, but imagine letting others play with your ACTUAL model!', 'raw': 'Screenshots are fine, but imagine letting others play with your ACTUAL model!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Introducing Gradio deep links 🔗 - now you can share interactive AI apps, not just images.', 'raw': 'Introducing Gradio deep links 🔗 - now you can share interactive AI apps, not just images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Add a gr.DeepLinkButton to any app and get shareable URLs that let ANYONE experiment with your models.', 'raw': 'Add a gr.DeepLinkButton to any app and get shareable URLs that let ANYONE experiment with your models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Ever wanted to share your AI creations with friends? ✨ + +Screenshots are fine, but imagine letting others play with your ACTUAL model! + +Introducing Gradio deep links 🔗 - now you can share interactive AI apps, not just images. + +Add a gr.DeepLinkButton to any app and get shareable URLs that let ANYONE experiment with your models. + +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626a9bfa03e2e2796f24ca11/AWxQoD2yxfgLdLSDp2jEt.mp4'}]",[],"[{'reaction': '❤️', 'users': ['modenn', 'kuma21', 'xinnn63', 'Takugen', 'dantezxcd', 'Fishtiks'], 'count': 6}, {'reaction': '👀', 'users': ['John6666', 'xinnn63', 'robb-0', 'dantezxcd'], 'count': 4}]",2025-03-25 18:00:25,2025-03-25 18:00:25.412,[],/posts/freddyaboulton/609815746401982,2143,"{'language': 'en', 'probability': 0.8916387557983398}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1624629516652-5ff5d596f244529b3ec0fb89.png,873.0,Philipp Schmid,philschmid,709514015104990,"[{'type': 'text', 'value': 'Gemini 2.5 Pro, thinking by default! We excited launch our best Gemini model for reasoning, multimodal and coding yet! #1 on LMSYS, Humanity’s Last Exam, AIME and GPQA and more! ', 'raw': 'Gemini 2.5 Pro, thinking by default! We excited launch our best Gemini model for reasoning, multimodal and coding yet! #1 on LMSYS, Humanity’s Last Exam, AIME and GPQA and more! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'TL;DR:', 'raw': 'TL;DR:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 💻\xa0Best Gemini coding model yet, particularly for web development (excels on LiveCodeBench).', 'raw': '- 💻\xa0Best Gemini coding model yet, particularly for web development (excels on LiveCodeBench).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🧠\xa0Default ""Thinking"" with up to 64k token output', 'raw': '- 🧠\xa0Default ""Thinking"" with up to 64k token output'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🌌\xa01 Million multimodal input\xa0context for text, image, video, audio, and pdf', 'raw': '- 🌌\xa01 Million multimodal input\xa0context for text, image, video, audio, and pdf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🛠️\xa0Function calling, structured output, google search & code execution.', 'raw': '- 🛠️\xa0Function calling, structured output, google search & code execution.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- 🏆\xa0\xa0#1 on LMArena & sota on AIME, GPQA, Humanity's Last Exam"", 'raw': ""- 🏆\xa0\xa0#1 on LMArena & sota on AIME, GPQA, Humanity's Last Exam""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 💡\xa0Knowledge cut of\xa0January 2025', 'raw': '- 💡\xa0Knowledge cut of\xa0January 2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🤗\xa0Available for free as Experimental in AI Studio, Gemini API & Gemini APP', 'raw': '- 🤗\xa0Available for free as Experimental in AI Studio, Gemini API & Gemini APP'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🚀\xa0Rate limits - Free 2 RPM 50 req/day', 'raw': '- 🚀\xa0Rate limits - Free 2 RPM 50 req/day'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it ⬇️', 'raw': 'Try it ⬇️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://aistudio.google.com/?model=gemini-2.5-pro-exp-03-25', 'raw': 'https://aistudio.google.com/?model=gemini-2.5-pro-exp-03-25'}]","Gemini 2.5 Pro, thinking by default! We excited launch our best Gemini model for reasoning, multimodal and coding yet! #1 on LMSYS, Humanity’s Last Exam, AIME and GPQA and more! + +TL;DR: +- 💻 Best Gemini coding model yet, particularly for web development (excels on LiveCodeBench). +- 🧠 Default ""Thinking"" with up to 64k token output +- 🌌 1 Million multimodal input context for text, image, video, audio, and pdf +- 🛠️ Function calling, structured output, google search & code execution. 
+- 🏆  #1 on LMArena & sota on AIME, GPQA, Humanity's Last Exam +- 💡 Knowledge cutoff of January 2025 +- 🤗 Available for free as Experimental in AI Studio, Gemini API & Gemini APP +- 🚀 Rate limits - Free 2 RPM 50 req/day + +Try it ⬇️ + +https://aistudio.google.com/?model=gemini-2.5-pro-exp-03-25",[],[],"[{'reaction': '🔥', 'users': ['freddyaboulton', 'John6666', 'bcci', 'PujiPramusanto', 'kuma21', 'Closgro', 'xinnn63', 'Takugen', 'cahlen', 'dantezxcd', 'th5'], 'count': 11}, {'reaction': '👀', 'users': ['robb-0', 'cahlen', 'dantezxcd'], 'count': 3}, {'reaction': '🧠', 'users': ['cahlen', 'dantezxcd'], 'count': 2}]",2025-03-25 17:54:27,2025-04-02 04:15:13.209,"[{'_id': '6303cc3e1dd5d3c62483bd51', 'avatarUrl': '/avatars/afbc48df2e8c47c35be48168113d83c0.svg', 'fullname': 's', 'name': 'Tom-Neverwinter', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '676d567baeae4cac152ae26e', 'avatarUrl': '/avatars/4a578a6cd236bb167972c28b9a02d881.svg', 'fullname': 'Eric Nunes', 'name': 'ericnunes1', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '60f3555ad0d225cb72755cf1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1626560096722-60f3555ad0d225cb72755cf1.png', 'fullname': 'Salim Belhaddad', 'name': 'salym', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/philschmid/709514015104990,3416,"{'language': 'en', 'probability': 0.7148236632347107}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/64d1129297ca59bcf7458d07/54J83Jv0Mk2CEX73Bx1_g.jpeg,81.0,Manav Majumdar,smirki,138011292240971,"[{'type': 'text', 'value': 'I was able to make a demo dashboard application with my react model through prompting. You can play with it here: ', 'raw': 'I was able to make a demo dashboard application with my react model through prompting. You can play with it here: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Tesslate/Tessa-T1-14B'}, 'url': 'https://huggingface.co/Tesslate/Tessa-T1-14B', 'raw': 'https://huggingface.co/Tesslate/Tessa-T1-14B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'http://playcode.io/2309196', 'raw': 'http://playcode.io/2309196'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What my react model made (prompted each file individually)', 'raw': 'What my react model made (prompted each file individually)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ""Ex.\nCreate a React component named Header that accepts the following props:\n\nlogo (string): the URL to the logo image\n\ntitle (string): the title text to display\n\nmenuItems (array of objects): each object should contain a label (string) and href (string)\nThe Header should render a logo (an <img>), the title (e.g., in an <h1>), and a navigation menu with links. The component should be responsive with a mobile menu option. Export it as the default export.\n\nIt should be one of the coolest things I've ever seen. Have it have a search and profile login and almost every feature that is really nice in a header. It should be framer level quality."", 'raw': ""```\nEx.\nCreate a React component named Header that accepts the following props:\n\nlogo (string): the URL to the logo image\n\ntitle (string): the title text to display\n\nmenuItems (array of objects): each object should contain a label (string) and href (string)\nThe Header should render a logo (an <img>), the title (e.g., in an <h1>), and a navigation menu with links. The component should be responsive with a mobile menu option. Export it as the default export.\n\nIt should be one of the coolest things I've ever seen. Have it have a search and profile login and almost every feature that is really nice in a header. It should be framer level quality.\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And a final prompt:', 'raw': 'And a final prompt:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ""Construct a React component named Dashboard that integrates the Header, Sidebar, MainContent, and Footer components. (These should all be imports) This component should:\n\nState Management: Maintain a state variable activeTab (string) using React’s useState hook, defaulting to an initial value (e.g., 'dashboard').\n\nState Propagation: Pass activeTab and a state update function (e.g., setActiveTab) to the Sidebar component via the onTabChange prop. Also pass activeTab to MainContent so that it knows which content to render.\n\nLayout: Arrange the components using a responsive layout. Place the Header at the top, a flex container for the body with the Sidebar on the left and MainContent on the right, and the Footer at the bottom.\n\nStyling: Use inline styles or CSS classes for basic layout structure (e.g., flexbox, grid). Export Dashboard as the default export."", 'raw': ""```\nConstruct a React component named Dashboard that integrates the Header, Sidebar, MainContent, and Footer components. (These should all be imports) This component should:\n\nState Management: Maintain a state variable activeTab (string) using React’s useState hook, defaulting to an initial value (e.g., 'dashboard').\n\nState Propagation: Pass activeTab and a state update function (e.g., setActiveTab) to the Sidebar component via the onTabChange prop. Also pass activeTab to MainContent so that it knows which content to render.\n\nLayout: Arrange the components using a responsive layout. Place the Header at the top, a flex container for the body with the Sidebar on the left and MainContent on the right, and the Footer at the bottom.\n\nStyling: Use inline styles or CSS classes for basic layout structure (e.g., flexbox, grid). Export Dashboard as the default export.\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","I was able to make a demo dashboard application with my react model through prompting. You can play with it here: https://huggingface.co/Tesslate/Tessa-T1-14B + +http://playcode.io/2309196 + +What my react model made (prompted each file individually) +``` +Ex. +Create a React component named Header that accepts the following props: + +logo (string): the URL to the logo image + +title (string): the title text to display + +menuItems (array of objects): each object should contain a label (string) and href (string) +The Header should render a logo (an <img>), the title (e.g., in an <h1>), and a navigation menu with links. The component should be responsive with a mobile menu option. Export it as the default export. + +It should be one of the coolest things I've ever seen. Have it have a search and profile login and almost every feature that is really nice in a header. It should be framer level quality. +``` + +And a final prompt: +``` +Construct a React component named Dashboard that integrates the Header, Sidebar, MainContent, and Footer components. (These should all be imports) This component should: + +State Management: Maintain a state variable activeTab (string) using React’s useState hook, defaulting to an initial value (e.g., 'dashboard'). + +State Propagation: Pass activeTab and a state update function (e.g., setActiveTab) to the Sidebar component via the onTabChange prop. Also pass activeTab to MainContent so that it knows which content to render. + +Layout: Arrange the components using a responsive layout. Place the Header at the top, a flex container for the body with the Sidebar on the left and MainContent on the right, and the Footer at the bottom. + +Styling: Use inline styles or CSS classes for basic layout structure (e.g., flexbox, grid). Export Dashboard as the default export. +``` + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64d1129297ca59bcf7458d07/yXnKqa8ktPttkHzTPuVdG.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64d1129297ca59bcf7458d07/npVw2AHzDGkVUAPvTLsIi.png'}]",[],"[{'reaction': '❤️', 'users': ['kuma21', 'xinnn63', 'Dev9124', 'Takugen', 'dantezxcd'], 'count': 5}, {'reaction': '👀', 'users': ['John6666', 'xinnn63', 'dantezxcd'], 'count': 3}]",2025-03-25 17:34:46,2025-03-27 13:03:57.203,[],/posts/smirki/138011292240971,1168,"{'language': 'en', 'probability': 0.831882655620575}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/uth49Pqb91NHdCJYH6vcz.png,3.0,Zlatin Balevsky,zlatinb,969200508438848,"[{'type': 'text', 'value': 'JFK declassified documents datasets', 'raw': 'JFK declassified documents datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Hello, I've prepared two datasets (raw and cleaned) of the recently declassified documents related to the assassination of President John F.
Kennedy.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Raw ', 'raw': 'Raw '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'zlatinb/jfk-2025-raw'}, 'url': 'https://huggingface.co/datasets/zlatinb/jfk-2025-raw', 'raw': 'https://huggingface.co/datasets/zlatinb/jfk-2025-raw'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cleaned ', 'raw': 'Cleaned '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'zlatinb/jfk-2025-cleaned'}, 'url': 'https://huggingface.co/datasets/zlatinb/jfk-2025-cleaned', 'raw': 'https://huggingface.co/datasets/zlatinb/jfk-2025-cleaned'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The 2182 documents cover a vast range of topics, so it may be interesting to train on them to generate insights.', 'raw': 'The 2182 documents cover a vast range of topics, so it may be interesting to train on them to generate insights.'}]","JFK declassified documents datasets + +Hello, I've prepared two datasets (raw and cleaned) of the recently declassified documents related to the assassination of President John F. Kennedy. + +Raw https://huggingface.co/datasets/zlatinb/jfk-2025-raw +Cleaned https://huggingface.co/datasets/zlatinb/jfk-2025-cleaned + +The 2182 documents cover a vast range of topics, so it may be interesting to train on them to generate insights.",[],[],"[{'reaction': '👍', 'users': ['Impulse2000', 'legacy090', 'Jah11', 'CATAMERCA', 'alexander583', 'xinnn63', 'dantezxcd'], 'count': 7}, {'reaction': '😎', 'users': ['John6666', 'Impulse2000', 'Takugen', 'dantezxcd'], 'count': 4}]",2025-03-21 04:33:15,2025-03-21 04:33:15.241,[],/posts/zlatinb/969200508438848,1918,"{'language': 'en', 'probability': 0.8826546669006348}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,142231502952502,"[{'type': 'text', 'value': 'Finally, the ground truth / AlexNet’s original source code is available to all. ', 'raw': 'Finally, the ground truth / AlexNet’s original source code is available to all. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Context: AlexNet had a historic win in the 2012 ImageNet Large Scale Visual Recognition Challenge (ILSVRC), reducing error rate from 26% (previous best) to 15.3%. It’s a deep CNN with 8 layers (5 convolutional + 3 fully connected), pioneering the use of ReLU activations for faster training, dropout for regularization, and GPU acceleration for large-scale learning. This moment marked the beginning of the deep learning revolution, inspiring architectures like VGG, ResNet, and modern transformers.', 'raw': 'Context: AlexNet had a historic win in the 2012 ImageNet Large Scale Visual Recognition Challenge (ILSVRC), reducing error rate from 26% (previous best) to 15.3%. It’s a deep CNN with 8 layers (5 convolutional + 3 fully connected), pioneering the use of ReLU activations for faster training, dropout for regularization, and GPU acceleration for large-scale learning. 
This moment marked the beginning of the deep learning revolution, inspiring architectures like VGG, ResNet, and modern transformers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'link', 'href': 'https://github.com/computerhistory/AlexNet-Source-Code', 'raw': 'https://github.com/computerhistory/AlexNet-Source-Code'}]","Finally, the ground truth / AlexNet’s original source code is available to all. +Context: AlexNet had a historic win in the 2012 ImageNet Large Scale Visual Recognition Challenge (ILSVRC), reducing error rate from 26% (previous best) to 15.3%. It’s a deep CNN with 8 layers (5 convolutional + 3 fully connected), pioneering the use of ReLU activations for faster training, dropout for regularization, and GPU acceleration for large-scale learning. This moment marked the beginning of the deep learning revolution, inspiring architectures like VGG, ResNet, and modern transformers. +Code: https://github.com/computerhistory/AlexNet-Source-Code","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/VPp-9ok5yGGT8Xit7TNL5.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/bsaXED3zFztG3kgwkbYDB.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/twiJ6jUapHPTQGQih21nf.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/Jie9ghCvPfQNzrhBbQVEF.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'melekuk', 'huyzed', 'GoDjMike', 'xinnn63', 'dantezxcd'], 'count': 6}, {'reaction': '👍', 'users': ['lijiansheng', 'eaddario', 'Takugen', 'dantezxcd'], 'count': 4}]",2025-03-21 03:43:04,2025-03-21 03:43:04.544,[],/posts/Jaward/142231502952502,1779,"{'language': 'en', 'probability': 0.8306787610054016}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/Ixt-6rl27rnJmHRMRp71t.jpeg,1.0,Shihua Huang,ShihuaHuang,539551541054679,"[{'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'Our work, DEIM, is available on HF: https://huggingface.co/papers/2412.04234. The SoTA COCO real-time object detector.', 'raw': '```\nOur work, DEIM, is available on HF: https://huggingface.co/papers/2412.04234. The SoTA COCO real-time object detector.\n```'}, {'type': 'new_line', 'raw': '\n'}]"," +``` +Our work, DEIM, is available on HF: https://huggingface.co/papers/2412.04234. The SoTA COCO real-time object detector. +``` +",[],[],"[{'reaction': '👀', 'users': ['fuloo', 'John6666', 'YaTharThShaRma999', 'xinnn63', 'dantezxcd'], 'count': 5}]",2025-03-21 02:31:25,2025-03-21 02:31:25.215,[],/posts/ShihuaHuang/539551541054679,1456,"{'language': 'en', 'probability': 0.7355419397354126}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,812841775665112,"[{'type': 'text', 'value': 'FlexWorld 🔥 an open framework that generates 3D scenes from a single image! ', 'raw': 'FlexWorld 🔥 an open framework that generates 3D scenes from a single image! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'GSAI-ML/FlexWorld'}, 'url': 'https://huggingface.co/GSAI-ML/FlexWorld', 'raw': 'https://huggingface.co/GSAI-ML/FlexWorld'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2503.13265'}, 'url': 'https://huggingface.co/papers/2503.13265', 'raw': 'https://huggingface.co/papers/2503.13265', 'label': 'FlexWorld: Progressively Expanding 3D Scenes for Flexiable-View\n Synthesis (2503.13265)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 360° rotation & zooming ', 'raw': '✨ 360° rotation & zooming '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ High quality novel views powered by video-to-video diffusion model', 'raw': '✨ High quality novel views powered by video-to-video diffusion model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Progressive 3D expansion', 'raw': '✨ Progressive 3D expansion'}, {'type': 'new_line', 'raw': '\n'}]","FlexWorld 🔥 an open framework that generates 3D scenes from a single image! + +Model: https://huggingface.co/GSAI-ML/FlexWorld +Paper: https://huggingface.co/papers/2503.13265 + +✨ 360° rotation & zooming +✨ High quality novel views powered by video-to-video diffusion model +✨ Progressive 3D expansion +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/IMFFdgygBl9gv3BimtERy.mp4'}]",[],"[{'reaction': '👍', 'users': ['fuloo', 'John6666', 'merve', 'bevvy', 'xinnn63', 'dantezxcd', 'Lilys123'], 'count': 7}, {'reaction': '😎', 'users': ['Aurelien-Morgan', 'Takugen', 'dantezxcd'], 'count': 3}, {'reaction': '🔥', 'users': ['nicoboss', 'dantezxcd'], 'count': 2}]",2025-03-20 19:51:04,2025-04-02 12:41:26.175,"[{'_id': '67e296358dd868113ffc5ffc', 'avatarUrl': '/avatars/f945949c4b3f1a77a405f1943fcb5080.svg', 'fullname': 'Lily Samuels', 'name': 'Lilys123', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '63a369d98c0c89dcae3b8329', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg', 'fullname': 'Adina Yakefu', 'name': 'AdinaY', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 774, 'isFollowing': False}]",/posts/AdinaY/812841775665112,2162,"{'language': 'en', 'probability': 0.7247273921966553}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/5kE1rvdIVfUftt7B__ysg.png,3.0,Thomas Tong,gtvracer,203705668925540,"[{'type': 'text', 'value': ""I'm getting this all of a sudden, even generated a new token but still get a 401. anyone else seeing this?"", 'raw': ""I'm getting this all of a sudden, even generated a new token but still get a 401. 
anyone else seeing this?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Exception:401 Client Error: Unauthorized for url: ', 'raw': 'Exception:401 Client Error: Unauthorized for url: '}, {'type': 'link', 'href': 'https://router.huggingface.co/hf-inference/models/meta-llama/Llama-3.2-3B-Instruct/v1/chat/completions', 'raw': 'https://router.huggingface.co/hf-inference/models/meta-llama/Llama-3.2-3B-Instruct/v1/chat/completions'}, {'type': 'text', 'value': ' (Request ID: Root=1-67dc6b20-3a4697761ad9315c06ca928a;d914bcf1-063a-4df2-acc2-8e0170ddccb3)', 'raw': ' (Request ID: Root=1-67dc6b20-3a4697761ad9315c06ca928a;d914bcf1-063a-4df2-acc2-8e0170ddccb3)'}, {'type': 'new_line', 'raw': '\n'}]","I'm getting this all of a sudden, even generated a new token but still get a 401. anyone else seeing this? +Exception:401 Client Error: Unauthorized for url: https://router.huggingface.co/hf-inference/models/meta-llama/Llama-3.2-3B-Instruct/v1/chat/completions (Request ID: Root=1-67dc6b20-3a4697761ad9315c06ca928a;d914bcf1-063a-4df2-acc2-8e0170ddccb3) +",[],[],"[{'reaction': '👀', 'users': ['John6666', 'xinnn63', 'Takugen', 'dantezxcd', 'MiTenorio'], 'count': 5}]",2025-03-20 19:32:52,2025-03-22 01:11:27.197,"[{'_id': '670b077235918e99fe9dfe88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/5kE1rvdIVfUftt7B__ysg.png', 'fullname': 'Thomas Tong', 'name': 'gtvracer', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/gtvracer/203705668925540,487,"{'language': 'en', 'probability': 0.5521977543830872}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,894580491700125,"[{'type': 'text', 'value': 'Play with Orpheus TTS, a Llama-based Speech-LLM designed for high-quality, empathetic text-to-speech generation. This model has been fine-tuned to deliver human-level speech synthesis 🔥🗣️', 'raw': 'Play with Orpheus TTS, a Llama-based Speech-LLM designed for high-quality, empathetic text-to-speech generation. 
This model has been fine-tuned to deliver human-level speech synthesis 🔥🗣️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉GitHub [ Demo ] : ', 'raw': '👉GitHub [ Demo ] : '}, {'type': 'link', 'href': 'https://github.com/PRITHIVSAKTHIUR/Orpheus-TTS-Edge', 'raw': 'https://github.com/PRITHIVSAKTHIUR/Orpheus-TTS-Edge'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo supporting both text-to-speech and text-to-LLM responses in speech.', 'raw': 'Demo supporting both text-to-speech and text-to-LLM responses in speech.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' > voice: tara, dan, emma, josh', 'raw': ' > voice: tara, dan, emma, josh'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> emotion: <laugh>, <chuckle>, <sigh>, <cough>, <sniffle>, <groan>, <yawn>, <gasp>.', 'raw': '> emotion: <laugh>, <chuckle>, <sigh>, <cough>, <sniffle>, <groan>, <yawn>, <gasp>.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥠Orpheus-3b-0.1-ft ', 'raw': '🥠Orpheus-3b-0.1-ft '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model Page: ', 'raw': 'Model Page: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'canopylabs/orpheus-3b-0.1-ft'}, 'url': 'https://huggingface.co/canopylabs/orpheus-3b-0.1-ft', 'raw': 'https://huggingface.co/canopylabs/orpheus-3b-0.1-ft'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥠Orpheus-3b-0.1-ft', 'raw': '🥠Orpheus-3b-0.1-ft'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Colab Inference Notebook: ', 'raw': 'Colab Inference Notebook: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1KhXT56UePPUHhqitJNUxq63k-pQomz3N?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1KhXT56UePPUHhqitJNUxq63k-pQomz3N?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥠Finetune [ orpheus-3b-0.1-pretrained ]', 'raw': '🥠Finetune [ orpheus-3b-0.1-pretrained ]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Resource: ', 'raw': 'Resource: '}, {'type': 'link', 'href': 'https://github.com/canopyai/Orpheus-TTS/tree/main/finetune', 'raw': 'https://github.com/canopyai/Orpheus-TTS/tree/main/finetune'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥠Model-releases:', 'raw': '🥠Model-releases:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://canopylabs.ai/model-releases', 'raw': 'https://canopylabs.ai/model-releases'}]","Play with Orpheus TTS, a Llama-based Speech-LLM designed for high-quality, empathetic text-to-speech generation. This model has been fine-tuned to deliver human-level speech synthesis 🔥🗣️ + +👉GitHub [ Demo ] : https://github.com/PRITHIVSAKTHIUR/Orpheus-TTS-Edge + +Demo supporting both text-to-speech and text-to-LLM responses in speech. + + > voice: tara, dan, emma, josh +> emotion: <laugh>, <chuckle>, <sigh>, <cough>, <sniffle>, <groan>, <yawn>, <gasp>.
+ +🥠Orpheus-3b-0.1-ft +Model Page: https://huggingface.co/canopylabs/orpheus-3b-0.1-ft + +🥠Orpheus-3b-0.1-ft +Colab Inference Notebook: https://colab.research.google.com/drive/1KhXT56UePPUHhqitJNUxq63k-pQomz3N?usp=sharing + +🥠Finetune [ orpheus-3b-0.1-pretrained ] +Resource: https://github.com/canopyai/Orpheus-TTS/tree/main/finetune + +🥠Model-releases: +https://canopylabs.ai/model-releases","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/_fFSo5THGa8KNzJoCe727.mp4'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'Fishtiks', 'ameerazam08', 'Yukkkop', 'jaco-bro', 's0me-0ne', 'Mutar51', 'xinnn63', 'Takugen', 'dantezxcd'], 'count': 10}]",2025-03-20 17:08:52,2025-03-25 13:11:36.108,"[{'_id': '6289f6f56a2a449b99a794ba', 'avatarUrl': '/avatars/9741b6d6b9bb92c4277d2552b561747b.svg', 'fullname': 'bla bla', 'name': 'bla', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/prithivMLmods/894580491700125,2382,"{'language': 'en', 'probability': 0.6595438122749329}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,551641290499667,"[{'type': 'text', 'value': 'I'd like to benchmark 💵o1-pro💵 but it is way too expensive for me 🤦\u200d♂️', 'raw': 'I'd like to benchmark 💵o1-pro💵 but it is way too expensive for me 🤦\u200d♂️'}]",I'd like to benchmark 💵o1-pro💵 but it is way too expensive for me 🤦‍♂️,[],[],"[{'reaction': '👀', 'users': ['John6666', 'NexThinkLabs', 'xinnn63', 'dantezxcd'], 'count': 4}, {'reaction': '😔', 'users': ['AtAndDev', 'azhiboedova', 'Takugen', 'dantezxcd'], 'count': 4}]",2025-03-20 17:08:31,2025-03-21 16:40:17.791,"[{'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '669dbd709a4bf63e08f1ddc2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png', 'fullname': 'Yi Cui', 'name': 'onekq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}, {'_id': '65708a0d670035a60722b4ad', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65708a0d670035a60722b4ad/wSc0Y_ibXL-baWExBpHZp.png', 'fullname': 'James Clarke', 'name': 'Impulse2000', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 11, 'isFollowing': False}]",/posts/onekq/551641290499667,1581,"{'language': 'en', 'probability': 0.9270561337471008}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,968928866217294,"[{'type': 'text', 'value': 'Should we assemble affordable open-source robots at Hugging Face for the community? Would you buy them? At what price?', 'raw': 'Should we assemble affordable open-source robots at Hugging Face for the community? Would you buy them? At what price?'}]",Should we assemble affordable open-source robots at Hugging Face for the community? Would you buy them?
At what price?,[],[],"[{'reaction': '👀', 'users': ['benjamin-paine', 'AtAndDev', 'John6666', 'ngxson', 'mkozak', 'mrdbourke', 'Kullpar', 'midnight-snekk', 'punkreload', 'Chroma111', 'sugatoray', 'xinnn63', 'Takugen', 'dantezxcd'], 'count': 14}, {'reaction': '❤️', 'users': ['ngxson', 'acamilogg88', 'Chroma111', 'GrStant', 'burtenshaw', 'Smorty100', 'dantezxcd'], 'count': 7}, {'reaction': '🔥', 'users': ['on1onmangoes', 'ngxson', 'Chroma111', 'adityasihag', 'Smorty100', 'dantezxcd'], 'count': 6}, {'reaction': '🚀', 'users': ['Chroma111', 'adityasihag', 'dantezxcd'], 'count': 3}, {'reaction': '👍', 'users': ['Chroma111', 'Dragunflie-420', 'dantezxcd'], 'count': 3}, {'reaction': '🤗', 'users': ['Chroma111', 'dantezxcd'], 'count': 2}, {'reaction': '🤝', 'users': ['Chroma111', 'dantezxcd'], 'count': 2}, {'reaction': '➕', 'users': ['Chroma111', 'dantezxcd'], 'count': 2}]",2025-03-20 16:25:26,2025-03-24 07:57:14.963,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '63ca214abedad7e2bf1d1517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1674191139776-noauth.png', 'fullname': 'Xuan-Son Nguyen', 'name': 'ngxson', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 257, 'isFollowing': False}, {'_id': '64836b4d0087ddd1262f23fd', 'avatarUrl': '/avatars/1d7e2f5259c9a0a5a51d040a452ea23d.svg', 'fullname': 'Angel Camilo Guillen Guzman', 'name': 'acamilogg88', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65823de94653431901ca9523', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/h6lGv7rrFd8RjBfHdMgXP.png', 'fullname': 'Nirav Madhani', 'name': 'Nirav-Madhani', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '642cc1c253e76b4c2286c58e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/642cc1c253e76b4c2286c58e/fGtQ_QeTjUgBhIT89dpUt.jpeg', 'fullname': 'rombo dawg', 'name': 'rombodawg', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 248, 'isFollowing': False}, {'_id': '62d648291fa3e4e7ae3fa6e8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png', 'fullname': 'ben burtenshaw', 'name': 'burtenshaw', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3314, 'isFollowing': False}, {'_id': '67585020cf88bd7052cd7047', 'avatarUrl': '/avatars/c7e4979f04fda14b73a43c398ce7da27.svg', 'fullname': 'ZiggyStardust', 'name': 'ZiggyS', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/clem/968928866217294,3809,"{'language': 'en', 'probability': 0.9481398463249207}",8 
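A side note on the 401 error reported in the gtvracer post above: that router URL is an OpenAI-compatible chat-completions endpoint, and a 401 almost always means the Bearer token was missing, malformed, or lacks inference permissions (a freshly generated fine-grained token needs "Make calls to Inference Providers" enabled). Below is a minimal, unofficial sketch of how such a request is typically authenticated; the `HF_TOKEN` environment variable and the exact payload shape are assumptions, not code from the post.

```python
import os
import requests

# Endpoint copied verbatim from the post above; it speaks the
# standard OpenAI-style chat-completions protocol.
API_URL = "https://router.huggingface.co/hf-inference/models/meta-llama/Llama-3.2-3B-Instruct/v1/chat/completions"

def chat(prompt: str) -> str:
    token = os.environ["HF_TOKEN"]  # a valid Hugging Face access token (assumed env var)
    resp = requests.post(
        API_URL,
        headers={"Authorization": f"Bearer {token}"},
        json={
            "model": "meta-llama/Llama-3.2-3B-Instruct",
            "messages": [{"role": "user", "content": prompt}],
        },
        timeout=60,
    )
    resp.raise_for_status()  # a 401 raised here means the token was not accepted
    return resp.json()["choices"][0]["message"]["content"]

print(chat("Say hello in one sentence."))
```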
+https://cdn-avatars.huggingface.co/v1/production/uploads/613b0a62a14099d5afed7830/pLuqSIYaNYhUqdjxlNrFn.png,127.0,Loïck BOURDOIS,lbourdois,751427866406946,"[{'type': 'text', 'value': 'We introduce FAT5 (Flash Attention T5) ⚡', 'raw': 'We introduce FAT5 (Flash Attention T5) ⚡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'An implementation of T5 in PyTorch with the UL2 objective, optimized for GPGPU for both training and inference thanks to 13 different optimizations. ', 'raw': 'An implementation of T5 in PyTorch with the UL2 objective, optimized for GPGPU for both training and inference thanks to 13 different optimizations. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The main one is that we have designed a CUDA kernel to extend the Flash Attention by ', 'raw': 'The main one is that we have designed a CUDA kernel to extend the Flash Attention by '}, {'type': 'mention', 'user': 'tridao', 'raw': '@tridao'}, {'type': 'text', 'value': ' with RPE biases; it also supports other PEs such as RoPE, ALiBi or FIRE.', 'raw': ' with RPE biases; it also supports other PEs such as RoPE, ALiBi or FIRE.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The resulting kernel is 2 times faster than an SDPA implementation. ', 'raw': 'The resulting kernel is 2 times faster than an SDPA implementation. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We also use Triton kernels to optimize certain parts of the architecture, such as the cross-entropy and RMSNorm layer.', 'raw': 'We also use Triton kernels to optimize certain parts of the architecture, such as the cross-entropy and RMSNorm layer.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The various kernels have been carefully built to be compatible with BF16 and torch.compile to go even faster and achieve efficient pretraining.', 'raw': 'The various kernels have been carefully built to be compatible with BF16 and torch.compile to go even faster and achieve efficient pretraining.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All other optimizations are described in a 📝 subsequent blog post available on ', 'raw': 'All other optimizations are described in a 📝 subsequent blog post available on '}, {'type': 'mention', 'user': 'huggingface', 'raw': '@huggingface'}, {'type': 'text', 'value': ' 🤗: ', 'raw': ' 🤗: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'CATIE-AQ/FAT5-report'}, 'url': 'https://huggingface.co/spaces/CATIE-AQ/FAT5-report', 'raw': 'https://huggingface.co/spaces/CATIE-AQ/FAT5-report'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This methodology enabled us to efficiently pretrain as a proof of concept a FAT5 with 147M parameters in French in a reasonable time (1,461H for 419B tokens), with limited resources (1 A100 i.e. a computational budget of ~ €1,900) and a low carbon footprint (13.5kg eq CO2).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The model's weights are also available on Hugging Face: "", 'raw': ""The model's weights are also available on Hugging Face: ""}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'CATIE-AQ/FAT5-small'}, 'url': 'https://huggingface.co/CATIE-AQ/FAT5-small', 'raw': 'https://huggingface.co/CATIE-AQ/FAT5-small'}, {'type': 'text', 'value': '. ', 'raw': '. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Not very useful in practice, it's a PoC and not an instructed model (it's planned for later)."", 'raw': ""Not very useful in practice, it's a PoC and not an instructed model (it's planned for later).""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All the code is available on GitHub if you want to pretrain your own model in your own language or for a specific domain: ', 'raw': 'All the code is available on GitHub if you want to pretrain your own model in your own language or for a specific domain: '}, {'type': 'link', 'href': 'https://github.com/catie-aq/flashT5', 'raw': 'https://github.com/catie-aq/flashT5'}, {'type': 'text', 'value': ' ⭐', 'raw': ' ⭐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We end by noting that this was a joint project with ', 'raw': 'We end by noting that this was a joint project with '}, {'type': 'mention', 'user': 'BorisAlbar', 'raw': '@BorisAlbar'}, {'type': 'text', 'value': ' at hf.co/CATIE-AQ.', 'raw': ' at hf.co/CATIE-AQ.'}, {'type': 'new_line', 'raw': '\n'}]","We introduce FAT5 (Flash Attention T5) ⚡ + +An implementation of T5 in PyTorch with the UL2 objective, optimized for GPGPU for both training and inference thanks to 13 different optimizations. +The main one is that we have designed a CUDA kernel to extend the Flash Attention by @tridao with RPE biases; it also supports other PEs such as RoPE, ALiBi or FIRE. +The resulting kernel is 2 times faster than an SDPA implementation. +We also use Triton kernels to optimize certain parts of the architecture, such as the cross-entropy and RMSNorm layer. + +The various kernels have been carefully built to be compatible with BF16 and torch.compile to go even faster and achieve efficient pretraining. + +All other optimizations are described in a 📝 subsequent blog post available on @huggingface 🤗: https://huggingface.co/spaces/CATIE-AQ/FAT5-report. + +This methodology enabled us to efficiently pretrain as a proof of concept a FAT5 with 147M parameters in French in a reasonable time (1,461H for 419B tokens), with limited resources (1 A100 i.e. a computational budget of ~ €1,900) and a low carbon footprint (13.5kg eq CO2). + +The model's weights are also available on Hugging Face: https://huggingface.co/CATIE-AQ/FAT5-small. +Not very useful in practice, it's a PoC and not an instructed model (it's planned for later). + +All the code is available on GitHub if you want to pretrain your own model in your own language or for a specific domain: https://github.com/catie-aq/flashT5 ⭐ + +We end by noting that this was a joint project with @BorisAlbar at hf.co/CATIE-AQ.
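To make the FAT5 kernel discussion above concrete, here is a minimal PyTorch sketch of attention with an additive T5-style relative-position bias, using the stock scaled_dot_product_attention rather than the FAT5 CUDA kernel; the bucketing scheme and all sizes are illustrative assumptions, not the FAT5 implementation.

```python
import torch
import torch.nn.functional as F

B, H, T, D = 2, 8, 128, 64          # batch, heads, sequence length, head dim
q, k, v = (torch.randn(B, H, T, D) for _ in range(3))

# Toy T5-style RPE: clamp relative distances into 63 buckets and look up a
# learned scalar per (head, bucket); real T5 uses log-spaced buckets.
rel = torch.arange(T)[None, :] - torch.arange(T)[:, None]   # (T, T)
buckets = rel.clamp(-31, 31) + 31                           # values in [0, 62]
bias_table = torch.randn(H, 63)
bias = bias_table[:, buckets].unsqueeze(0)                  # (1, H, T, T)

# Stock SDPA takes the bias as a dense additive attn_mask; the FAT5 kernel
# instead fuses the RPE bias into Flash Attention so it is never materialized.
out = F.scaled_dot_product_attention(q, k, v, attn_mask=bias)
print(out.shape)  # torch.Size([2, 8, 128, 64])
```

The dense (H, T, T) bias this sketch materializes is exactly the memory cost that fusing the bias into the Flash Attention kernel avoids.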
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/613b0a62a14099d5afed7830/T-nLNgODQ_VKVyL1ii6ea.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/613b0a62a14099d5afed7830/fxrT2QeJRSjDS9p7eI2wW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/613b0a62a14099d5afed7830/xbIoyGDpzw1vg8OSt9yCQ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/613b0a62a14099d5afed7830/2DYpXRKV6i6G1Hk6w0unZ.png'}]","[{'_id': '67167777496517fd6e8a2168', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/0z8zwpt8R0XVHBsBphER3.png', 'fullname': 'Boris Albar', 'name': 'BorisAlbar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}, {'_id': '64b8a6b5cf14c2fabe98159b', 'avatarUrl': '/avatars/dbc009451865435bf290791beadc4723.svg', 'fullname': 'Tri Dao', 'name': 'tridao', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 84}]","[{'reaction': '❤️', 'users': ['Nomiia', 'John6666', 'Ayorinha', 'dillfrescott', 'reach-vb', 'cesar5514', 'Quazim0t0', 'xinnn63', 'dantezxcd', 'lyu-boxuan'], 'count': 10}, {'reaction': '🔥', 'users': ['Ayorinha', 'Nouxoid', 'dillfrescott', 'reach-vb', 'cesar5514', 'Takugen', 'dantezxcd', 'lyu-boxuan'], 'count': 8}]",2025-03-20 13:48:12,2025-03-20 13:48:12.458,[],/posts/lbourdois/751427866406946,2938,"{'language': 'en', 'probability': 0.9114298224449158}",0 +/avatars/fb866e3758189d70488fc6a879151f45.svg,21.0,Akihito Miyazaki,Akjava,930943007750043,"[{'type': 'text', 'value': ""I've shared Hugging Face Spaces for CPU-based RAG and T5/Flan-T5 models. The smolagents-rag space sometimes produces high-quality answers, but it can be slow. Qwen2.5-0.5B is as fast as a CPU implementation and generates answers of acceptable quality. I've found that Gemma3-4B produces significantly more stable answers than the 1B version."", 'raw': ""I've shared Hugging Face Spaces for CPU-based RAG and T5/Flan-T5 models. The smolagents-rag space sometimes produces high-quality answers, but it can be slow. Qwen2.5-0.5B is as fast as a CPU implementation and generates answers of acceptable quality. 
I've found that Gemma3-4B produces significantly more stable answers than the 1B version.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Rag', 'raw': 'Rag'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/Gemma3-4B-llamacpp-cpu-rag-smolagents'}, 'url': 'https://huggingface.co/spaces/Akjava/Gemma3-4B-llamacpp-cpu-rag-smolagents', 'raw': 'https://huggingface.co/spaces/Akjava/Gemma3-4B-llamacpp-cpu-rag-smolagents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/Qwen2.5-0.5B-Rag-Thinking-Flan-T5'}, 'url': 'https://huggingface.co/spaces/Akjava/Qwen2.5-0.5B-Rag-Thinking-Flan-T5', 'raw': 'https://huggingface.co/spaces/Akjava/Qwen2.5-0.5B-Rag-Thinking-Flan-T5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 't5/flan-t5', 'raw': 't5/flan-t5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/llamacpp-flan-t5-large-grammar-synthesis'}, 'url': 'https://huggingface.co/spaces/Akjava/llamacpp-flan-t5-large-grammar-synthesis', 'raw': 'https://huggingface.co/spaces/Akjava/llamacpp-flan-t5-large-grammar-synthesis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/llamacpp-madlad400-3b-mt-2jp'}, 'url': 'https://huggingface.co/spaces/Akjava/llamacpp-madlad400-3b-mt-2jp', 'raw': 'https://huggingface.co/spaces/Akjava/llamacpp-madlad400-3b-mt-2jp'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hugging Face Free CPU Limitations', 'raw': 'Hugging Face Free CPU Limitations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'When duplicating a space, the build process (llama-cpp-python) can occasionally become stuck, requiring a manual restart to finish.', 'raw': 'When duplicating a space, the build process (llama-cpp-python) can occasionally become stuck, requiring a manual restart to finish.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Spaces may unexpectedly stop functioning or even be deleted, leading to the need to rework them. Refer to issue for more information.', 'raw': 'Spaces may unexpectedly stop functioning or even be deleted, leading to the need to rework them. Refer to issue for more information.'}]","I've shared Hugging Face Spaces for CPU-based RAG and T5/Flan-T5 models. The smolagents-rag space sometimes produces high-quality answers, but it can be slow. Qwen2.5-0.5B is as fast as a CPU implementation and generates answers of acceptable quality. I've found that Gemma3-4B produces significantly more stable answers than the 1B version. + +Rag +https://huggingface.co/spaces/Akjava/Gemma3-4B-llamacpp-cpu-rag-smolagents +https://huggingface.co/spaces/Akjava/Qwen2.5-0.5B-Rag-Thinking-Flan-T5 + +t5/flan-t5 +https://huggingface.co/spaces/Akjava/llamacpp-flan-t5-large-grammar-synthesis +https://huggingface.co/spaces/Akjava/llamacpp-madlad400-3b-mt-2jp + +Hugging Face Free CPU Limitations +When duplicating a space, the build process (llama-cpp-python) can occasionally become stuck, requiring a manual restart to finish. +Spaces may unexpectedly stop functioning or even be deleted, leading to the need to rework them.
Refer to issue for more information.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'xinnn63', 'Takugen', 'dantezxcd'], 'count': 4}]",2025-03-20 13:16:28,2025-03-20 13:16:28.735,[],/posts/Akjava/930943007750043,609,"{'language': 'en', 'probability': 0.8390635251998901}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,493536072420586,"[{'type': 'text', 'value': 'Prepared presets for Wan 2.1 for every model and GPU with modelscope / DiffSynth-Studio - Works with maximum speed as long as you are not using more than 2 GB VRAM - Compared BF16 vs FP8 as well', 'raw': 'Prepared presets for Wan 2.1 for every model and GPU with modelscope / DiffSynth-Studio - Works with maximum speed as long as you are not using more than 2 GB VRAM - Compared BF16 vs FP8 as well'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our app tutorial main : ', 'raw': 'Our app tutorial main : '}, {'type': 'link', 'href': 'https://youtu.be/hnAhveNy-8s', 'raw': 'https://youtu.be/hnAhveNy-8s'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2nd tutorial : ', 'raw': '2nd tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/ueMrzmbdWBg', 'raw': 'https://youtu.be/ueMrzmbdWBg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our App : ', 'raw': 'Our App : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/click-to-open-post-used-in-tutorial-123105403', 'raw': 'https://www.patreon.com/posts/click-to-open-post-used-in-tutorial-123105403'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also our App now has fully updated presets for every GPU both for BF16 and FP8 precision', 'raw': 'Also our App now has fully updated presets for every GPU both for BF16 and FP8 precision'}, {'type': 'new_line', 'raw': '\n'}]","Prepared presets for Wan 2.1 for every model and GPU with modelscope / DiffSynth-Studio - Works with maximum speed as long as you are not using more than 2 GB VRAM - Compared BF16 vs FP8 as well + +Our app tutorial main : https://youtu.be/hnAhveNy-8s + +2nd tutorial : https://youtu.be/ueMrzmbdWBg + +Our App : https://www.patreon.com/posts/click-to-open-post-used-in-tutorial-123105403 + +Also our App now has fully updated presets for every GPU both for BF16 and FP8 precision +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/psxYVrBlfOG76xLEgC__u.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/9cD3H7zQ-Ag6rgYkk0AAc.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/7o0_Krc2L0VhYGIVDKIx2.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/9MAZ3h9VSFo9j6IUg0e8d.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/lhlkPKcZl-A1VbHvHoH9c.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/2ilc0wBzMaZnerK8y3Zgq.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/FOBnXVn3T6eumj6S279X3.png'}]",[],"[{'reaction': '🚀', 'users': ['MonsterMMORPG', 'dantezxcd'], 'count': 2}, {'reaction': '👀', 'users': 
['MonsterMMORPG', 'dantezxcd'], 'count': 2}, {'reaction': '🔥', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '❤️', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-03-16 23:31:18,2025-03-16 23:31:18.863,[],/posts/MonsterMMORPG/493536072420586,1045,"{'language': 'en', 'probability': 0.8487642407417297}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d52e0c4e5642795617f668/ztXLrdFz3gkUJUIIQXfHo.png,56.0,Yanis L,Pendrokar,346825157154217,"[{'type': 'text', 'value': 'TTS Arena: Added the Spark-TTS model Space to the Arena Fork:', 'raw': 'TTS Arena: Added the Spark-TTS model Space to the Arena Fork:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏆 ', 'raw': '🏆 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Pendrokar/TTS-Spaces-Arena'}, 'url': 'https://huggingface.co/spaces/Pendrokar/TTS-Spaces-Arena', 'raw': 'https://huggingface.co/spaces/Pendrokar/TTS-Spaces-Arena'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Spark-TTS ⚡: ', 'raw': ' Spark-TTS ⚡: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'thunnai/SparkTTS'}, 'url': 'https://huggingface.co/spaces/thunnai/SparkTTS', 'raw': 'https://huggingface.co/spaces/thunnai/SparkTTS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Rerouted Microsoft Edge TTS and XTTSv2 to have them back at the Arena. The Edge Space had Gradio API disabled, though a HF Space is not needed since it contacts a Microsoft server anyway. No clue how long this API will work. A ZeroGPU space is now used for XTTSv2.', 'raw': 'Rerouted Microsoft Edge TTS and XTTSv2 to have them back at the Arena. The Edge Space had Gradio API disabled, though a HF Space is not needed since it contacts a Microsoft server anyway. No clue how long this API will work. A ZeroGPU space is now used for XTTSv2.'}]","TTS Arena: Added the Spark-TTS model Space to the Arena Fork: +🏆 https://huggingface.co/spaces/Pendrokar/TTS-Spaces-Arena + + Spark-TTS ⚡: https://huggingface.co/spaces/thunnai/SparkTTS + +Rerouted Microsoft Edge TTS and XTTSv2 to have them back at the Arena. The Edge Space had Gradio API disabled, though a HF Space is not needed since it contacts a Microsoft server anyway. No clue how long this API will work. 
A ZeroGPU space is now used for XTTSv2.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'krinal', 'dantezxcd'], 'count': 3}]",2025-03-16 22:49:57,2025-03-16 22:49:57.813,[],/posts/Pendrokar/346825157154217,2161,"{'language': 'en', 'probability': 0.8010351657867432}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,216238217580963,"[{'type': 'text', 'value': '200', 'raw': '200'}]",200,[],[],"[{'reaction': '🚀', 'users': ['nroggendorff', 'pepper13', 'av-codes', 'AtAndDev', 'dantezxcd'], 'count': 5}, {'reaction': '🤗', 'users': ['pepper13', 'Smorty100', 'AtAndDev', 'nroggendorff', 'dantezxcd'], 'count': 5}, {'reaction': '😎', 'users': ['John6666'], 'count': 1}, {'reaction': '🤯', 'users': ['Smorty100'], 'count': 1}]",2025-03-16 22:30:27,2025-03-20 19:50:45.305,"[{'_id': '63841f4559424581c36195f6', 'avatarUrl': '/avatars/cf9ed19a2b780160231f9e99e1c4f45b.svg', 'fullname': 'lance chen', 'name': 'virdel', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '646dcd89f813cfe153ef795d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646dcd89f813cfe153ef795d/XAR9bYyLwS73ThAEdkXIF.png', 'fullname': 'M K', 'name': 'Astronos', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '62120043963205aa3224b40b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1676804188806-62120043963205aa3224b40b.jpeg', 'fullname': 'Ivan Nikishev', 'name': 'dpe1', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '6428c6786268f155b77ea31e', 'avatarUrl': '/avatars/8fd45bfc2772fe318f7e47f09b8b7cb4.svg', 'fullname': 'Ivan Charapanau', 'name': 'av-codes', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}]",/posts/nroggendorff/216238217580963,3642,"{'language': 'no', 'probability': 0.10372088849544525}",8 +https://cdn-avatars.huggingface.co/v1/production/uploads/5fef4eb7770b06e11c2c6381/1NMdigjCGtn0yvQZSi5NJ.png,64.0,Alessandro Ercolani,giux78,524924454012414,"[{'type': 'text', 'value': '@ mii-llm with ', 'raw': '@ mii-llm with '}, {'type': 'mention', 'user': 'efederici', 'raw': '@efederici'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'mferraretto', 'raw': '@mferraretto'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'FinancialSupport', 'raw': '@FinancialSupport'}, {'type': 'text', 
'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'DeepMount00', 'raw': '@DeepMount00'}, {'type': 'text', 'value': ' we just released #Propaganda, a framework designed to evaluate and train LLMs on political opinions and bias. We aim to analyze both open-source and closed-source LLMs to understand the political positions and biases expressed in their outputs. Moreover, we provide a set of recipes to enforce political positions into the models by creating ad hoc curated datasets and by applying fine-tuning techniques. By releasing our work in the open, we hope to foster contributions: ', 'raw': ' we just released #Propaganda, a framework designed to evaluate and train LLMs on political opinions and bias. We aim to analyze both open-source and closed-source LLMs to understand the political positions and biases expressed in their outputs. Moreover, we provide a set of recipes to enforce political positions into the models by creating ad hoc curated datasets and by applying fine-tuning techniques. By releasing our work in the open, we hope to foster contributions: '}, {'type': 'link', 'href': 'https://github.com/mii-llm/propaganda', 'raw': 'https://github.com/mii-llm/propaganda'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This framework offers opportunities for expansion in various directions and could become the standard reference for evaluating LLMs on political topics, particularly those that influence public opinion. ', 'raw': 'This framework offers opportunities for expansion in various directions and could become the standard reference for evaluating LLMs on political topics, particularly those that influence public opinion. '}]","@ mii-llm with @efederici @mferraretto @FinancialSupport and @DeepMount00 we just released #Propaganda, a framework designed to evaluate and train LLMs on political opinions and bias. We aim to analyze both open-source and closed-source LLMs to understand the political positions and biases expressed in their outputs. Moreover, we provide a set of recipes to enforce political positions into the models by creating ad hoc curated datasets and by applying fine-tuning techniques. By releasing our work in the open, we hope to foster contributions: https://github.com/mii-llm/propaganda + +This framework offers opportunities for expansion in various directions and could become the standard reference for evaluating LLMs on political topics, particularly those that influence public opinion.
","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fef4eb7770b06e11c2c6381/h4DCS7a-vzv-H0Q2D2a-h.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fef4eb7770b06e11c2c6381/0YNScJPBj3WZlgNOIR_3l.png'}]","[{'_id': '64f1bf6a8b550e875926a590', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64f1bf6a8b550e875926a590/xdZHPQGdI2jISWcKhWTMQ.png', 'fullname': 'Michele Montebovi', 'name': 'DeepMount00', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 176}, {'_id': '612246596d9ce900691744d2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/612246596d9ce900691744d2/9DlHVQDqblKz7QPTA6nDa.jpeg', 'fullname': 'Edoardo Federici', 'name': 'efederici', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 44}, {'_id': '648cca46d38113f34bf7cb72', 'avatarUrl': '/avatars/0f95fe632948f826a0585d1adf541f78.svg', 'fullname': 'Samuele Colombo', 'name': 'FinancialSupport', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23}, {'_id': '6586deb4bbb04840e35b5dbb', 'avatarUrl': '/avatars/24921b3a2600e145e6fc968164b25b9c.svg', 'fullname': 'Mattia Ferraretto', 'name': 'mferraretto', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9}]","[{'reaction': '❤️', 'users': ['marcuscedricridia', 'DeepMount00', 'giseldo', 'coffeepowered', 'dclipca', 'giux78', 'FinancialSupport', 'dantezxcd'], 'count': 8}, {'reaction': '👍', 'users': ['marcuscedricridia', 'dclipca', 'owao', 'dantezxcd'], 'count': 4}, {'reaction': '🔥', 'users': ['marcuscedricridia', 'jscordel', 'dclipca'], 'count': 3}, {'reaction': '🧠', 'users': ['marcuscedricridia', 'John6666', 'dclipca'], 'count': 3}, {'reaction': '👀', 'users': ['marcuscedricridia', 'dclipca'], 'count': 2}, {'reaction': '🤯', 'users': ['marcuscedricridia', 'dclipca'], 'count': 2}]",2025-03-16 21:44:56,2025-03-16 21:44:56.550,[],/posts/giux78/524924454012414,2895,"{'language': 'en', 'probability': 0.9336910843849182}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,624548696865407,"[{'type': 'text', 'value': '15 types of attention mechanisms', 'raw': '15 types of attention mechanisms'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Attention mechanisms allow models to dynamically focus on specific parts of their input when performing tasks. In our recent article, we discussed Multi-Head Latent Attention (MLA) in detail and now it's time to summarize other existing types of attention."", 'raw': ""Attention mechanisms allow models to dynamically focus on specific parts of their input when performing tasks. In our recent article, we discussed Multi-Head Latent Attention (MLA) in detail and now it's time to summarize other existing types of attention.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here is a list of 15 types of attention mechanisms used in AI models:', 'raw': 'Here is a list of 15 types of attention mechanisms used in AI models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Soft attention (Deterministic attention) -> ', 'raw': '1. 
Soft attention (Deterministic attention) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '1409.0473'}, 'url': 'https://huggingface.co/papers/1409.0473', 'raw': 'https://huggingface.co/papers/1409.0473', 'label': 'Neural Machine Translation by Jointly Learning to Align and Translate (1409.0473)'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Assigns a continuous weight distribution over all parts of the input. It produces a weighted sum of the input using attention weights that sum to 1.', 'raw': 'Assigns a continuous weight distribution over all parts of the input. It produces a weighted sum of the input using attention weights that sum to 1.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Hard attention (Stochastic attention) -> ', 'raw': '2. Hard attention (Stochastic attention) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '1508.04025'}, 'url': 'https://huggingface.co/papers/1508.04025', 'raw': 'https://huggingface.co/papers/1508.04025', 'label': 'Effective Approaches to Attention-based Neural Machine Translation (1508.04025)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Makes a discrete selection of some part of the input to focus on at each step, rather than attending to everything. ', 'raw': 'Makes a discrete selection of some part of the input to focus on at each step, rather than attending to everything. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Self-attention -> ', 'raw': '3. Self-attention -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '1706.03762'}, 'url': 'https://huggingface.co/papers/1706.03762', 'raw': 'https://huggingface.co/papers/1706.03762', 'label': 'Attention Is All You Need (1706.03762)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Each element in the sequence ""looks"" at other elements and ""decides"" how much to borrow from each of them for its new representation.', 'raw': 'Each element in the sequence ""looks"" at other elements and ""decides"" how much to borrow from each of them for its new representation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Cross-Attention (Encoder-Decoder attention) -> ', 'raw': '4. Cross-Attention (Encoder-Decoder attention) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2104.08771'}, 'url': 'https://huggingface.co/papers/2104.08771', 'raw': 'https://huggingface.co/papers/2104.08771', 'label': 'Cross-Attention is All You Need: Adapting Pretrained Transformers for\n Machine Translation (2104.08771)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The queries come from one sequence and the keys/values come from another sequence. It allows a model to combine information from two different sources.', 'raw': 'The queries come from one sequence and the keys/values come from another sequence. It allows a model to combine information from two different sources.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. Multi-Head Attention (MHA) -> ', 'raw': '5. 
Multi-Head Attention (MHA) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '1706.03762'}, 'url': 'https://huggingface.co/papers/1706.03762', 'raw': 'https://huggingface.co/papers/1706.03762', 'label': 'Attention Is All You Need (1706.03762)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Multiple attention “heads” are run in parallel.\u200b The model computes several attention distributions (heads), each with its own set of learned projections of queries, keys, and values. ', 'raw': 'Multiple attention “heads” are run in parallel.\u200b The model computes several attention distributions (heads), each with its own set of learned projections of queries, keys, and values. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6. Multi-Head Latent Attention (MLA) -> ', 'raw': '6. Multi-Head Latent Attention (MLA) -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.04434'}, 'url': 'https://huggingface.co/papers/2405.04434', 'raw': 'https://huggingface.co/papers/2405.04434', 'label': 'DeepSeek-V2: A Strong, Economical, and Efficient Mixture-of-Experts\n Language Model (2405.04434)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Extends MHA by incorporating a latent space where attention heads can dynamically learn different latent factors or representations.', 'raw': 'Extends MHA by incorporating a latent space where attention heads can dynamically learn different latent factors or representations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7. Memory-Based attention -> ', 'raw': '7. Memory-Based attention -> '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '1503.08895'}, 'url': 'https://huggingface.co/papers/1503.08895', 'raw': 'https://huggingface.co/papers/1503.08895', 'label': 'End-To-End Memory Networks (1503.08895)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Involves an external memory and uses attention to read from and write to this memory.', 'raw': 'Involves an external memory and uses attention to read from and write to this memory.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'See other types in the comments 👇', 'raw': 'See other types in the comments 👇'}]","15 types of attention mechanisms + +Attention mechanisms allow models to dynamically focus on specific parts of their input when performing tasks. In our recent article, we discussed Multi-Head Latent Attention (MLA) in detail and now it's time to summarize other existing types of attention. + +Here is a list of 15 types of attention mechanisms used in AI models: + +1. Soft attention (Deterministic attention) -> https://huggingface.co/papers/1409.0473 +Assigns a continuous weight distribution over all parts of the input. It produces a weighted sum of the input using attention weights that sum to 1. + +2. Hard attention (Stochastic attention) -> https://huggingface.co/papers/1508.04025 +Makes a discrete selection of some part of the input to focus on at each step, rather than attending to everything. + +3. Self-attention -> https://huggingface.co/papers/1706.03762 +Each element in the sequence ""looks"" at other elements and ""decides"" how much to borrow from each of them for its new representation. + +4. 
Cross-Attention (Encoder-Decoder attention) -> https://huggingface.co/papers/2104.08771 +The queries come from one sequence and the keys/values come from another sequence. It allows a model to combine information from two different sources. + +5. Multi-Head Attention (MHA) -> https://huggingface.co/papers/1706.03762 +Multiple attention “heads” are run in parallel.​ The model computes several attention distributions (heads), each with its own set of learned projections of queries, keys, and values. + +6. Multi-Head Latent Attention (MLA) -> https://huggingface.co/papers/2405.04434 +Extends MHA by incorporating a latent space where attention heads can dynamically learn different latent factors or representations. + +7. Memory-Based attention -> https://huggingface.co/papers/1503.08895 +Involves an external memory and uses attention to read from and write to this memory. + +See other types in the comments 👇",[],[],"[{'reaction': '🔥', 'users': ['benjamin-paine', 'John6666', 'jayan12k', 'parjun', 'AdinaY', 'mkurman', 'alyona0l', 'J4BEZ', 'marcuscedricridia', 'mrunmaymore', 'BrigitteTousi', 'cjerzak', 'knight7561', 'inflatebot', 'Siddharth899', 'thiru42', 'pomeo92', 'yakito', 'KnutJaegersberg', 'Licho', 'dantezxcd', 'tianchu', 'yohn-maistre'], 'count': 23}, {'reaction': '👍', 'users': ['krinal', 'zyh1690', 'rreed', 'pradiptadeb90', 'caesar0301', 'dantezxcd'], 'count': 6}, {'reaction': '➕', 'users': ['Mindweller', 'Emaad', 'caesar0301'], 'count': 3}, {'reaction': '🚀', 'users': ['minhnguyent546', 'meigel'], 'count': 2}, {'reaction': '👀', 'users': ['midnight-snekk'], 'count': 1}]",2025-03-16 13:12:52,2025-03-16 13:13:19.220,"[{'_id': '64838b28c235ef76b63e4999', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg', 'fullname': 'Ksenia Se', 'name': 'Kseniase', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 971, 'isFollowing': False}]",/posts/Kseniase/624548696865407,7941,"{'language': 'en', 'probability': 0.8720798492431641}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64edcaf8b692b67db135919d/AvbAablqqbnL-6VfdH0Rj.png,20.0,David G. R.,reddgr,209978700246376,"[{'type': 'text', 'value': ""The latest Space I'm working on. A UI for browsing, searching, and annotating one million chatbot chats from "", 'raw': ""The latest Space I'm working on. A UI for browsing, searching, and annotating one million chatbot chats from ""}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'lmsys/lmsys-chat-1m'}, 'url': 'https://huggingface.co/datasets/lmsys/lmsys-chat-1m', 'raw': 'https://huggingface.co/datasets/lmsys/lmsys-chat-1m'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'reddgr/chatbot-arena-dataset-wrapper'}, 'url': 'https://huggingface.co/spaces/reddgr/chatbot-arena-dataset-wrapper', 'raw': 'https://huggingface.co/spaces/reddgr/chatbot-arena-dataset-wrapper'}]","The latest Space I'm working on. 
A UI for browsing, searching, and annotating one million chatbot chats from https://huggingface.co/datasets/lmsys/lmsys-chat-1m + +https://huggingface.co/spaces/reddgr/chatbot-arena-dataset-wrapper",[],[],"[{'reaction': '👍', 'users': ['John6666', 'hanzla', 'dantezxcd'], 'count': 3}]",2025-03-16 12:19:22,2025-03-16 12:19:22.492,[],/posts/reddgr/209978700246376,1003,"{'language': 'en', 'probability': 0.7125454545021057}",0 +/avatars/937a64aea8fde2f41a065f052b39f409.svg,48.0,alkinun,AtAndDev,784658782191587,"[{'type': 'text', 'value': 'There seems to multiple paid apps shared here that are based on models on hf, but some ppl sell their wrappers as ""products"" and promote them here. For a long time, hf was the best and only platform to do oss model stuff but with the recent AI website builders anyone can create a product (really crappy ones btw) and try to sell it with no contribution to oss stuff. Please dont do this, or try finetuning the models you use...', 'raw': 'There seems to multiple paid apps shared here that are based on models on hf, but some ppl sell their wrappers as ""products"" and promote them here. For a long time, hf was the best and only platform to do oss model stuff but with the recent AI website builders anyone can create a product (really crappy ones btw) and try to sell it with no contribution to oss stuff. Please dont do this, or try finetuning the models you use...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sorry for filling yall feed with this bs but yk...', 'raw': 'Sorry for filling yall feed with this bs but yk...'}]","There seems to multiple paid apps shared here that are based on models on hf, but some ppl sell their wrappers as ""products"" and promote them here. For a long time, hf was the best and only platform to do oss model stuff but with the recent AI website builders anyone can create a product (really crappy ones btw) and try to sell it with no contribution to oss stuff. Please dont do this, or try finetuning the models you use... +Sorry for filling yall feed with this bs but yk...",[],[],"[{'reaction': '😔', 'users': ['John6666', 'peskyduck', 'Ma121', 'fffiloni', '762spoooky', 'yakito', 'dantezxcd'], 'count': 7}, {'reaction': '👍', 'users': ['Quazim0t0', 'Vinzou', 'peskyduck', 'gordy12gg', 'bndp', 'nbsp'], 'count': 6}, {'reaction': '🔥', 'users': ['JLouisBiz', 'hanzla'], 'count': 2}]",2025-03-16 11:15:23,2025-03-23 12:26:41.481,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}]",/posts/AtAndDev/784658782191587,4324,"{'language': 'en', 'probability': 0.9686886072158813}",6 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,533812818810820,"[{'type': 'text', 'value': '📢 Several weeks ago Microsoft announced Phi-4. My most-recent list of LLM models have had only wrapper for Phi-2, so it was time to update! 
With this post, happy to share that Phi-4 wrapper is now available at nlp-thirdgate for adopting Chain-of-Thought reasoning:', 'raw': '📢 Several weeks ago Microsoft announced Phi-4. My most-recent list of LLM models have had only wrapper for Phi-2, so it was time to update! With this post, happy to share that Phi-4 wrapper is now available at nlp-thirdgate for adopting Chain-of-Thought reasoning:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 ', 'raw': '🤖 '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/llm/transformers_phi4.py', 'raw': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/llm/transformers_phi4.py'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📒 ', 'raw': '📒 '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/tutorials/llm_phi4.py', 'raw': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/tutorials/llm_phi4.py'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Findings on adaptation: I was able to reproduce only the pipeline based model launching. This version is for textual llm only. Microsoft also released multimodal Phi-4 which is out of scope of this wrapper.', 'raw': 'Findings on adaptation: I was able to reproduce only the pipeline based model launching. This version is for textual llm only. Microsoft also released multimodal Phi-4 which is out of scope of this wrapper.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌌 nlp-thirdgate: ', 'raw': '🌌 nlp-thirdgate: '}, {'type': 'link', 'href': 'https://lnkd.in/ef-wBnNn', 'raw': 'https://lnkd.in/ef-wBnNn'}]","📢 Several weeks ago Microsoft announced Phi-4. My most-recent list of LLM models have had only wrapper for Phi-2, so it was time to update! With this post, happy to share that Phi-4 wrapper is now available at nlp-thirdgate for adopting Chain-of-Thought reasoning: + +🤖 https://github.com/nicolay-r/nlp-thirdgate/blob/master/llm/transformers_phi4.py + +📒 https://github.com/nicolay-r/nlp-thirdgate/blob/master/tutorials/llm_phi4.py + +Findings on adaptation: I was able to reproduce only the pipeline based model launching. This version is for textual llm only. Microsoft also released multimodal Phi-4 which is out of scope of this wrapper. 
+ +🌌 nlp-thirdgate: https://lnkd.in/ef-wBnNn","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/PIAMNqO5kja7Fyx1cXi5S.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-03-16 07:42:27,2025-03-16 07:42:27.486,[],/posts/nicolay-r/533812818810820,674,"{'language': 'en', 'probability': 0.9268249869346619}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,890797314182369,"[{'type': 'text', 'value': 'Common formula to DIY a LLM:', 'raw': 'Common formula to DIY a LLM:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Post train a Qwen model with a dataset distilled from DeepSeek 😂', 'raw': 'Post train a Qwen model with a dataset distilled from DeepSeek 😂'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Common formula to DIY a LLM: + +Post train a Qwen model with a dataset distilled from DeepSeek 😂 + +",[],[],"[{'reaction': '🤗', 'users': ['John6666', 'SpyC0der77', 'asvs', 'dantezxcd'], 'count': 4}, {'reaction': '😎', 'users': ['robb-0', 'dantezxcd'], 'count': 2}]",2025-03-16 02:54:55,2025-03-17 17:49:30.015,"[{'_id': '663a18c876e6d5b98f3cc94d', 'avatarUrl': '/avatars/faa2a47dbdd3bce6d47e0fbed6170627.svg', 'fullname': 'Alan Tseng', 'name': 'agentlans', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 31, 'isFollowing': False}, {'_id': '669dbd709a4bf63e08f1ddc2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png', 'fullname': 'Yi Cui', 'name': 'onekq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}]",/posts/onekq/890797314182369,2485,"{'language': 'en', 'probability': 0.9111917614936829}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1678589663024-640d3eaa3623f6a56dde856d.jpeg,27.0,vansin,vansin,329603150202451,"[{'type': 'text', 'value': '🔥MedAgentBench Amazing Work🚀', 'raw': '🔥MedAgentBench Amazing Work🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just explored #MedAgentBench from ', 'raw': 'Just explored #MedAgentBench from '}, {'type': 'mention', 'user': 'Yale', 'raw': '@Yale'}, {'type': 'text', 'value': "" researchers and it's mind-blowing! They've created a cutting-edge benchmark that finally exposes the true capabilities of LLMs in complex medical reasoning."", 'raw': "" researchers and it's mind-blowing! 
They've created a cutting-edge benchmark that finally exposes the true capabilities of LLMs in complex medical reasoning.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡ Key discoveries:', 'raw': '⚡ Key discoveries:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'DeepSeek R1 & OpenAI O3 dominate clinical reasoning tasks', 'raw': 'DeepSeek R1 & OpenAI O3 dominate clinical reasoning tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Agent-based frameworks deliver exceptional performance-cost balance', 'raw': 'Agent-based frameworks deliver exceptional performance-cost balance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Open-source alternatives are closing the gap at fraction of the cost', 'raw': 'Open-source alternatives are closing the gap at fraction of the cost'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This work shatters previous benchmarks that failed to challenge today's advanced models."", 'raw': ""This work shatters previous benchmarks that failed to challenge today's advanced models.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The future of medical AI is here: ', 'raw': 'The future of medical AI is here: '}, {'type': 'link', 'href': 'https://github.com/gersteinlab/medagents-benchmark', 'raw': 'https://github.com/gersteinlab/medagents-benchmark'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#MedicalAI #MachineLearning #AIinHealthcare 🔥', 'raw': '#MedicalAI #MachineLearning #AIinHealthcare 🔥'}]","🔥MedAgentBench Amazing Work🚀 + +Just explored #MedAgentBench from @Yale researchers and it's mind-blowing! They've created a cutting-edge benchmark that finally exposes the true capabilities of LLMs in complex medical reasoning. + +⚡ Key discoveries: + +DeepSeek R1 & OpenAI O3 dominate clinical reasoning tasks +Agent-based frameworks deliver exceptional performance-cost balance +Open-source alternatives are closing the gap at fraction of the cost + +This work shatters previous benchmarks that failed to challenge today's advanced models. 
+The future of medical AI is here: https://github.com/gersteinlab/medagents-benchmark +#MedicalAI #MachineLearning #AIinHealthcare 🔥","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/640d3eaa3623f6a56dde856d/MrOUqw6ECWqeqCuTuEmUY.png'}]","[{'_id': '62acc09da9513b87749a130a', 'avatarUrl': '/avatars/fdebd24c88c0c4c52e4a63fb15902f8f.svg', 'fullname': 'Yale Spector', 'name': 'yale', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '❤️', 'users': ['allproai', 'Makar7', 'AtAndDev', 'John6666', 'erdo-enes', 'Omarrran', 'JQ1984', 'dantezxcd'], 'count': 8}, {'reaction': '🔥', 'users': ['wasabiyummy', 'Makar7', 'AtAndDev', 'dantezxcd'], 'count': 4}, {'reaction': '🤗', 'users': ['Omarrran'], 'count': 1}]",2025-03-16 00:12:31,2025-03-16 00:12:31.988,[],/posts/vansin/329603150202451,3517,"{'language': 'en', 'probability': 0.8584046959877014}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg,29.0,Sk md saad amin,Reality123b,499868923826628,"[{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'made a new model ', 'raw': 'made a new model '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'can anyone please benchmark it?', 'raw': 'can anyone please benchmark it?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Lap1official/Xylaria-1.8'}, 'url': 'https://huggingface.co/Lap1official/Xylaria-1.8', 'raw': 'https://huggingface.co/Lap1official/Xylaria-1.8'}]"," +made a new model +can anyone please benchmark it? + + +https://huggingface.co/Lap1official/Xylaria-1.8",[],[],"[{'reaction': '👍', 'users': ['JLouisBiz', 'ebisuke', 'dantezxcd'], 'count': 3}, {'reaction': '👀', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-03-12 16:25:47,2025-03-19 05:20:46.685,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '66fe8fb27d722f0879b4631f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg', 'fullname': 'Sk md saad amin', 'name': 'Reality123b', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}]",/posts/Reality123b/499868923826628,1285,"{'language': 'en', 'probability': 0.8767572641372681}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png,3314.0,ben burtenshaw,burtenshaw,548205593122339,"[{'type': 'text', 'value': 'Here’s a notebook to make Gemma reason with GRPO & TRL. I made this whilst prepping the next unit of the reasoning course:', 'raw': 'Here’s a notebook to make Gemma reason with GRPO & TRL. 
I made this whilst prepping the next unit of the reasoning course:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this notebooks I combine together google’s model with some community tooling', 'raw': 'In this notebooks I combine together google’s model with some community tooling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- First, I load the model from the Hugging Face hub with transformers’s latest release for Gemma 3', 'raw': '- First, I load the model from the Hugging Face hub with transformers’s latest release for Gemma 3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- I use PEFT and bitsandbytes to get it running on Colab', 'raw': '- I use PEFT and bitsandbytes to get it running on Colab'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Then, I took Will Browns processing and reward functions to make reasoning chains from GSM8k', 'raw': '- Then, I took Will Browns processing and reward functions to make reasoning chains from GSM8k'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Finally, I used TRL’s GRPOTrainer to train the model', 'raw': '- Finally, I used TRL’s GRPOTrainer to train the model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Next step is to bring Unsloth AI in, then ship it in the reasoning course. Links to notebook below.', 'raw': 'Next step is to bring Unsloth AI in, then ship it in the reasoning course. Links to notebook below.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1Vkl69ytCS3bvOtV9_stRETMthlQXR4wX?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1Vkl69ytCS3bvOtV9_stRETMthlQXR4wX?usp=sharing'}]","Here’s a notebook to make Gemma reason with GRPO & TRL. I made this whilst prepping the next unit of the reasoning course: + +In this notebooks I combine together google’s model with some community tooling + +- First, I load the model from the Hugging Face hub with transformers’s latest release for Gemma 3 +- I use PEFT and bitsandbytes to get it running on Colab +- Then, I took Will Browns processing and reward functions to make reasoning chains from GSM8k +- Finally, I used TRL’s GRPOTrainer to train the model + +Next step is to bring Unsloth AI in, then ship it in the reasoning course. Links to notebook below. 
+ +https://colab.research.google.com/drive/1Vkl69ytCS3bvOtV9_stRETMthlQXR4wX?usp=sharing",[],[],"[{'reaction': '👍', 'users': ['AGenchev', 'ritvik77', 'AtAndDev', 'sugatoray', 'jack729', 'xi0v', 'dantezxcd'], 'count': 7}, {'reaction': '🤗', 'users': ['John6666', 'AtAndDev', 'ZennyKenny', 'xi0v', 'dantezxcd'], 'count': 5}, {'reaction': '❤️', 'users': ['salym', 'dantezxcd'], 'count': 2}, {'reaction': '🔥', 'users': ['xi0v'], 'count': 1}]",2025-03-12 16:04:26,2025-03-19 15:20:16.813,"[{'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '676c67da7bad1587f2d046e2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/676c67da7bad1587f2d046e2/Y-aJmRj9FB49n36X1qKo9.jpeg', 'fullname': 'Akhil Theerthala', 'name': 'Akhil-Theerthala', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12, 'isFollowing': False}, {'_id': '656e3808d4de03a07d116850', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg', 'fullname': 'Kenneth Hamilton', 'name': 'ZennyKenny', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 77, 'isFollowing': False}, {'_id': '660aa0fa59f37491ddd9c0f0', 'avatarUrl': '/avatars/4aee92f3b2320c88fe59e002e91a1327.svg', 'fullname': 'chenk-ai', 'name': 'chenk-ai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/burtenshaw/548205593122339,2168,"{'language': 'en', 'probability': 0.8596261739730835}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857746553-5df7e9e5da6d0311fd3d53f9.jpeg,1173.0,Thomas Wolf,thomwolf,597591144299421,"[{'type': 'text', 'value': ""We've kept pushing our Open-R1 project, an open initiative to replicate and extend the techniques behind DeepSeek-R1."", 'raw': ""We've kept pushing our Open-R1 project, an open initiative to replicate and extend the techniques behind DeepSeek-R1.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""And even we were mind-blown by the results we got with this latest model we're releasing: ⚡️OlympicCoder ("", 'raw': ""And even we were mind-blown by the results we got with this latest model we're releasing: ⚡️OlympicCoder (""}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'open-r1/OlympicCoder-7B'}, 'url': 'https://huggingface.co/open-r1/OlympicCoder-7B', 'raw': 'https://huggingface.co/open-r1/OlympicCoder-7B'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'open-r1/OlympicCoder-32B'}, 'url': 'https://huggingface.co/open-r1/OlympicCoder-32B', 'raw': 'https://huggingface.co/open-r1/OlympicCoder-32B'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's beating Claude 3.7 on (competitive) programming –a domain Anthropic has been historically really strong at– and it's getting close to o1-mini/R1 on olympiad level coding with just 7B parameters!"", 'raw': ""It's beating Claude 3.7 on (competitive) programming –a domain Anthropic has been historically really strong at– and it's getting close 
to o1-mini/R1 on olympiad level coding with just 7B parameters!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""And the best part is that we're open-sourcing all about its training dataset, the new IOI benchmark, and more in our Open-R1 progress report #3: "", 'raw': ""And the best part is that we're open-sourcing all about its training dataset, the new IOI benchmark, and more in our Open-R1 progress report #3: ""}, {'type': 'link', 'href': 'https://huggingface.co/blog/open-r1/update-3', 'raw': 'https://huggingface.co/blog/open-r1/update-3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Datasets we are releasing:', 'raw': 'Datasets we are releasing:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'open-r1/codeforces'}, 'url': 'https://huggingface.co/datasets/open-r1/codeforces', 'raw': 'https://huggingface.co/datasets/open-r1/codeforces'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'open-r1/codeforces-cots'}, 'url': 'https://huggingface.co/datasets/open-r1/codeforces-cots', 'raw': 'https://huggingface.co/datasets/open-r1/codeforces-cots'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'open-r1/ioi'}, 'url': 'https://huggingface.co/datasets/open-r1/ioi', 'raw': 'https://huggingface.co/datasets/open-r1/ioi'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'open-r1/ioi-test-cases'}, 'url': 'https://huggingface.co/datasets/open-r1/ioi-test-cases', 'raw': 'https://huggingface.co/datasets/open-r1/ioi-test-cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'open-r1/ioi-sample-solutions'}, 'url': 'https://huggingface.co/datasets/open-r1/ioi-sample-solutions', 'raw': 'https://huggingface.co/datasets/open-r1/ioi-sample-solutions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'open-r1/ioi-cots'}, 'url': 'https://huggingface.co/datasets/open-r1/ioi-cots', 'raw': 'https://huggingface.co/datasets/open-r1/ioi-cots'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'open-r1/ioi-2024-model-solutions'}, 'url': 'https://huggingface.co/datasets/open-r1/ioi-2024-model-solutions', 'raw': 'https://huggingface.co/datasets/open-r1/ioi-2024-model-solutions'}]","We've kept pushing our Open-R1 project, an open initiative to replicate and extend the techniques behind DeepSeek-R1. 
+ +And even we were mind-blown by the results we got with this latest model we're releasing: ⚡️OlympicCoder (https://huggingface.co/open-r1/OlympicCoder-7B and https://huggingface.co/open-r1/OlympicCoder-32B) + +It's beating Claude 3.7 on (competitive) programming –a domain Anthropic has been historically really strong at– and it's getting close to o1-mini/R1 on olympiad level coding with just 7B parameters! + +And the best part is that we're open-sourcing all about its training dataset, the new IOI benchmark, and more in our Open-R1 progress report #3: https://huggingface.co/blog/open-r1/update-3 + +Datasets we are releasing: +- https://huggingface.co/datasets/open-r1/codeforces +- https://huggingface.co/datasets/open-r1/codeforces-cots +- https://huggingface.co/datasets/open-r1/ioi +- https://huggingface.co/datasets/open-r1/ioi-test-cases +- https://huggingface.co/datasets/open-r1/ioi-sample-solutions +- https://huggingface.co/datasets/open-r1/ioi-cots +- https://huggingface.co/datasets/open-r1/ioi-2024-model-solutions","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5df7e9e5da6d0311fd3d53f9/sSVIH_RTYDU4B64aI8W8j.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['AdinaY', 'KingNish', 'BrigitteTousi', 'John6666', 'luciaquirke', 'AtAndDev', 'Makar7', 'mjsolidarios', 'lathashree01', 'stzhao'], 'count': 10}, {'reaction': '🚀', 'users': ['AdinaY', 'Quazim0t0', 'BrigitteTousi', 'Aurelien-Morgan', 'luciaquirke', 'AtAndDev', 'gn00029914', 'dantezxcd'], 'count': 8}, {'reaction': '❤️', 'users': ['Csplk', 'dantezxcd'], 'count': 2}]",2025-03-12 13:58:32,2025-03-12 14:01:13.633,[],/posts/thomwolf/597591144299421,2966,"{'language': 'en', 'probability': 0.8372530937194824}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/679b8aaaa4583bfdb4a89861/zS972g-ciRUyBPjrBhlxH.png,12.0,Lun Zima,Lunzima,985171161718711,"[{'type': 'text', 'value': ""I'm currently experimenting with the SFT dataset "", 'raw': ""I'm currently experimenting with the SFT dataset ""}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Lunzima/alpaca_like_dataset'}, 'url': 'https://huggingface.co/datasets/Lunzima/alpaca_like_dataset', 'raw': 'https://huggingface.co/datasets/Lunzima/alpaca_like_dataset'}, {'type': 'text', 'value': "" to further boost the performance of NQLSG-Qwen2.5-14B-MegaFusion-v9.x. This includes data sourced from DeepSeek-R1 or other cleaned results (excluding CoTs). Additionally, datasets that could potentially enhance the model's performance in math and programming/code, as well as those dedicated to specific uses like Swahili, are part of the mix. "", 'raw': "" to further boost the performance of NQLSG-Qwen2.5-14B-MegaFusion-v9.x. This includes data sourced from DeepSeek-R1 or other cleaned results (excluding CoTs). Additionally, datasets that could potentially enhance the model's performance in math and programming/code, as well as those dedicated to specific uses like Swahili, are part of the mix. 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'sometimesanotion', 'raw': '@sometimesanotion'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'sthenno', 'raw': '@sthenno'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'wanlige', 'raw': '@wanlige'}]","I'm currently experimenting with the SFT dataset https://huggingface.co/datasets/Lunzima/alpaca_like_dataset to further boost the performance of NQLSG-Qwen2.5-14B-MegaFusion-v9.x. This includes data sourced from DeepSeek-R1 or other cleaned results (excluding CoTs). Additionally, datasets that could potentially enhance the model's performance in math and programming/code, as well as those dedicated to specific uses like Swahili, are part of the mix. +@sometimesanotion @sthenno @wanlige",[],"[{'_id': '665fef5a4794222f6a2fe605', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/665fef5a4794222f6a2fe605/sUF9NsMRNxdiEYKJbIbCk.jpeg', 'fullname': 'sometimesanotion', 'name': 'sometimesanotion', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 103}, {'_id': '66f889e35144a8d0c68b8078', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66f889e35144a8d0c68b8078/_aVcjIFcD6VD1N4mqog65.jpeg', 'fullname': 'Sthenno', 'name': 'sthenno', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 30}, {'_id': '6793067db040b0997803dfea', 'avatarUrl': '/avatars/5f36df9bfde529a09673a958cbb75462.svg', 'fullname': 'gewanli', 'name': 'wanlige', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 15}]","[{'reaction': '🚀', 'users': ['Quazim0t0', 'John6666', 'dantezxcd'], 'count': 3}, {'reaction': '👍', 'users': ['wanlige', 'dantezxcd'], 'count': 2}]",2025-03-12 13:47:53,2025-03-12 14:04:22.525,"[{'_id': '679b8aaaa4583bfdb4a89861', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/679b8aaaa4583bfdb4a89861/zS972g-ciRUyBPjrBhlxH.png', 'fullname': 'Lun Zima', 'name': 'Lunzima', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12, 'isFollowing': False}]",/posts/Lunzima/985171161718711,1342,"{'language': 'en', 'probability': 0.903853714466095}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg,2185.0,Hafedh Hichri,not-lain,517384672532857,"[{'type': 'text', 'value': '🚀AraClip is now fully integrated with Hugging Face 🤗', 'raw': '🚀AraClip is now fully integrated with Hugging Face 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AraClip is a specialized CLIP model that was created by ', 'raw': 'AraClip is a specialized CLIP model that was created by '}, {'type': 'mention', 'user': 'pain', 'raw': '@pain'}, {'type': 'text', 'value': ' and optimized for Arabic text-image retrieval tasks🔥', 'raw': ' and optimized for Arabic text-image retrieval tasks🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Try it out 🔗', 'raw': '🔗 Try it out 🔗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 model: ', 'raw': '🤖 model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Arabic-Clip/araclip'}, 'url': 'https://huggingface.co/Arabic-Clip/araclip', 'raw': 'https://huggingface.co/Arabic-Clip/araclip'}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'text', 'value': '🧩 Gradio demo: ', 'raw': '🧩 Gradio demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Arabic-Clip/Araclip-Simplified'}, 'url': 'https://huggingface.co/spaces/Arabic-Clip/Araclip-Simplified', 'raw': 'https://huggingface.co/spaces/Arabic-Clip/Araclip-Simplified'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 website: ', 'raw': '🌐 website: '}, {'type': 'link', 'href': 'https://arabic-clip.github.io/Arabic-CLIP/', 'raw': 'https://arabic-clip.github.io/Arabic-CLIP/'}]","🚀AraClip is now fully integrated with Hugging Face 🤗 + +AraClip is a specialized CLIP model that was created by @pain and optimized for Arabic text-image retrieval tasks🔥 + +🔗 Try it out 🔗 +🤖 model: https://huggingface.co/Arabic-Clip/araclip +🧩 Gradio demo: https://huggingface.co/spaces/Arabic-Clip/Araclip-Simplified +🌐 website: https://arabic-clip.github.io/Arabic-CLIP/","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/nUM2BTmWPT9AWqIom_M_o.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/B6PLmky1U2pCLGTXAyWFt.jpeg'}]","[{'_id': '61934cc71832e6ac3837d8b0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61934cc71832e6ac3837d8b0/y5d8VCVsQPQFnYM3BT-ew.jpeg', 'fullname': 'Mohammad Albarham', 'name': 'pain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 22}]","[{'reaction': '🔥', 'users': ['John6666', 'pain', 'MuzammilMax', 'dantezxcd', 'GeneralGost'], 'count': 5}, {'reaction': '❤️', 'users': ['pain', 'MuzammilMax', 'dantezxcd'], 'count': 3}, {'reaction': '🚀', 'users': ['pain'], 'count': 1}]",2025-03-12 13:45:20,2025-03-14 01:59:58.553,"[{'_id': '61934cc71832e6ac3837d8b0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61934cc71832e6ac3837d8b0/y5d8VCVsQPQFnYM3BT-ew.jpeg', 'fullname': 'Mohammad Albarham', 'name': 'pain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 22, 'isFollowing': False}, {'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}]",/posts/not-lain/517384672532857,3685,"{'language': 'en', 'probability': 0.8434544801712036}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/651e96991b97c9f33d26bde6/-Bqs6qrmz0yCfwtB2e-6q.jpeg,179.0,Elie Bakouch,eliebak,473622204638987,"[{'type': 'text', 'value': 'Google just dropped an exciting technical report for the brand-new Gemma3 model! 🚀 Here are my personal notes highlighting the most intriguing architectural innovations, design choices, and insights from this release:', 'raw': 'Google just dropped an exciting technical report for the brand-new Gemma3 model! 
🚀 Here are my personal notes highlighting the most intriguing architectural innovations, design choices, and insights from this release:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1) Architecture choices:', 'raw': '1) Architecture choices:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> No more softcaping, replace by QK-Norm', 'raw': '> No more softcaping, replace by QK-Norm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Both Pre AND Post Norm', 'raw': '> Both Pre AND Post Norm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Wider MLP than Qwen2.5, ~ same depth', 'raw': '> Wider MLP than Qwen2.5, ~ same depth'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> SWA with 5:1 and 1024 (very small and cool ablation on the paper!)', 'raw': '> SWA with 5:1 and 1024 (very small and cool ablation on the paper!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> No MLA to save KV cache, SWA do the job! ', 'raw': '> No MLA to save KV cache, SWA do the job! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2) Long context', 'raw': '2) Long context'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Only increase the rope in the global layer (to 1M) ', 'raw': '> Only increase the rope in the global layer (to 1M) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""> Confirmation that it's harder to do long context for smol models, no 128k for the 1B"", 'raw': ""> Confirmation that it's harder to do long context for smol models, no 128k for the 1B""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Pretrained with 32k context? seems very high', 'raw': '> Pretrained with 32k context? 
seems very high'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> No yarn nor llama3 like rope extension', 'raw': '> No yarn nor llama3 like rope extension'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3) Distillation', 'raw': '3) Distillation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Only keep te first 256 logits for the teacher', 'raw': '> Only keep te first 256 logits for the teacher'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Ablation on the teacher gap (tl;dr you need some ""patience"" to see that using a small teacher is better)', 'raw': '> Ablation on the teacher gap (tl;dr you need some ""patience"" to see that using a small teacher is better)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> On policy distillation yeahh (by ', 'raw': '> On policy distillation yeahh (by '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '@agarwl_', 'raw': '@agarwl_'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' et al), not sure if the teacher gap behave the same here, curious if someone have more info?', 'raw': ' et al), not sure if the teacher gap behave the same here, curious if someone have more info?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4) Others', 'raw': '4) Others'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""> Checkpoint with QAT, that's very cool "", 'raw': ""> Checkpoint with QAT, that's very cool ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> RL using improve version of BOND, WARM/WARP good excuse to look at ', 'raw': '> RL using improve version of BOND, WARM/WARP good excuse to look at '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'ramealexandre', 'raw': '@ramealexandre'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' papers', 'raw': ' papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Only use Zero3, no TP/PP if i understand correctly ? ', 'raw': '> Only use Zero3, no TP/PP if i understand correctly ? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Training budget relatively similar than gemma2', 'raw': '> Training budget relatively similar than gemma2'}]","Google just dropped an exciting technical report for the brand-new Gemma3 model! 🚀 Here are my personal notes highlighting the most intriguing architectural innovations, design choices, and insights from this release: + +1) Architecture choices: +> No more softcaping, replace by QK-Norm +> Both Pre AND Post Norm +> Wider MLP than Qwen2.5, ~ same depth +> SWA with 5:1 and 1024 (very small and cool ablation on the paper!) +> No MLA to save KV cache, SWA do the job! + +2) Long context +> Only increase the rope in the global layer (to 1M) +> Confirmation that it's harder to do long context for smol models, no 128k for the 1B +> Pretrained with 32k context? seems very high +> No yarn nor llama3 like rope extension + +3) Distillation +> Only keep te first 256 logits for the teacher +> Ablation on the teacher gap (tl;dr you need some ""patience"" to see that using a small teacher is better) +> On policy distillation yeahh (by +@agarwl_ + et al), not sure if the teacher gap behave the same here, curious if someone have more info? 
+ +4) Others +> Checkpoint with QAT, that's very cool +> RL using improve version of BOND, WARM/WARP good excuse to look at +@ramealexandre + papers +> Only use Zero3, no TP/PP if i understand correctly ? +> Training budget relatively similar than gemma2","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/651e96991b97c9f33d26bde6/K6F8PCrPGcPs0N55C3ElL.png'}]",[],"[{'reaction': '🔥', 'users': ['Steveeeeeeen', 'BrigitteTousi', 'Yehor', 'John6666', 'AtAndDev', 'sugatoray', 'dantezxcd'], 'count': 7}]",2025-03-12 12:05:51,2025-03-12 18:14:24.801,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/eliebak/473622204638987,1875,"{'language': 'en', 'probability': 0.8401225805282593}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,894288519864452,"[{'type': 'text', 'value': 'Spark TTS 🔊New OPEN TTS model that can generate any voice with just seconds of audio! ', 'raw': 'Spark TTS 🔊New OPEN TTS model that can generate any voice with just seconds of audio! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Released by SparkAudio community🔥', 'raw': 'Released by SparkAudio community🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model👉 ', 'raw': 'Model👉 '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'SparkAudio/Spark-TTS-0.5B'}, 'url': 'https://huggingface.co/SparkAudio/Spark-TTS-0.5B', 'raw': 'https://huggingface.co/SparkAudio/Spark-TTS-0.5B'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper👉 ', 'raw': 'Paper👉 '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2503.01710'}, 'url': 'https://huggingface.co/papers/2503.01710', 'raw': 'https://huggingface.co/papers/2503.01710', 'label': 'Spark-TTS: An Efficient LLM-Based Text-to-Speech Model with\n Single-Stream Decoupled Speech Tokens (2503.01710)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Supports English & Chinese', 'raw': '✨ Supports English & Chinese'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ BiCodec Speech Codec: Enables precise voice control by separating semantics & speaker attributes', 'raw': '✨ BiCodec Speech Codec: Enables precise voice control by separating semantics & speaker attributes'}]","Spark TTS 🔊New OPEN TTS model that can generate any voice with just seconds of audio! 
+ +Released by SparkAudio community🔥 + +Model👉 https://huggingface.co/SparkAudio/Spark-TTS-0.5B +Paper👉 https://huggingface.co/papers/2503.01710 + +✨ Supports English & Chinese +✨ BiCodec Speech Codec: Enables precise voice control by separating semantics & speaker attributes","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/08YZA7-_vlZgZh6wEvit4.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/agP33z8pQtimVd_Hko90V.png'}]",[],"[{'reaction': '🔥', 'users': ['BrigitteTousi', 'John6666', 'Mdubbya', 'JohnRoger', 'dantezxcd'], 'count': 5}]",2025-03-12 11:23:16,2025-03-12 11:23:24.488,[],/posts/AdinaY/894288519864452,1337,"{'language': 'en', 'probability': 0.773253858089447}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64b16a747da6a1dca85604dc/0qkOUWMPZF4PNPCI0WYHy.jpeg,45.0,hanzlajavaid,hanzla,515980369678772,"[{'type': 'text', 'value': 'Gemma 3 is a game changer for on device multimodal applications. ', 'raw': 'Gemma 3 is a game changer for on device multimodal applications. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try for yourself how a 4 billion parameter model can be so good.', 'raw': 'Try for yourself how a 4 billion parameter model can be so good.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'hanzla/PlaygroundGemma3'}, 'url': 'https://huggingface.co/spaces/hanzla/PlaygroundGemma3', 'raw': 'https://huggingface.co/spaces/hanzla/PlaygroundGemma3'}]","Gemma 3 is a game changer for on device multimodal applications. + +Try for yourself how a 4 billion parameter model can be so good. 
+ +https://huggingface.co/spaces/hanzla/PlaygroundGemma3",[],[],"[{'reaction': '👍', 'users': ['John6666', 'AtAndDev', 'dantezxcd'], 'count': 3}]",2025-03-12 10:45:00,2025-03-12 18:23:01.645,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/hanzla/515980369678772,1256,"{'language': 'en', 'probability': 0.662793755531311}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,114721160529960,"[{'type': 'text', 'value': 'R1-Omni🔥RLVR-Powered Multimodal LLM released by Alibaba ', 'raw': 'R1-Omni🔥RLVR-Powered Multimodal LLM released by Alibaba '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'StarJiaxing/R1-Omni-0.5B'}, 'url': 'https://huggingface.co/StarJiaxing/R1-Omni-0.5B', 'raw': 'https://huggingface.co/StarJiaxing/R1-Omni-0.5B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2503.05379'}, 'url': 'https://huggingface.co/papers/2503.05379', 'raw': 'https://huggingface.co/papers/2503.05379', 'label': 'R1-Omni: Explainable Omni-Multimodal Emotion Recognition with\n Reinforcing Learning (2503.05379)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨0.5B with Apache2.0', 'raw': '✨0.5B with Apache2.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Improve emotion recognition with visual and audio cues', 'raw': '✨ Improve emotion recognition with visual and audio cues'}, {'type': 'new_line', 'raw': '\n'}]","R1-Omni🔥RLVR-Powered Multimodal LLM released by Alibaba + +Model: https://huggingface.co/StarJiaxing/R1-Omni-0.5B +Paper: https://huggingface.co/papers/2503.05379 + +✨0.5B with Apache2.0 +✨ Improve emotion recognition with visual and audio cues +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/i6xXEa4UrK8a-7aSsgdk-.png'}]",[],"[{'reaction': '🔥', 'users': ['BrigitteTousi', 'Quazim0t0', 'BrokenFL', 'John6666', 'lukiebub-house-band-ai', 'dantezxcd'], 'count': 6}]",2025-03-12 10:38:38,2025-03-12 23:09:11.107,"[{'_id': '6668dabdab2fe65d9366f60c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6668dabdab2fe65d9366f60c/6_WFwtHjlbW6WJkVZ4cWD.jpeg', 'fullname': 'Lubub IA', 'name': 'lukiebub-house-band-ai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/AdinaY/114721160529960,1456,"{'language': 'en', 'probability': 0.7489116191864014}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/qW3-oKDLFJpue2iS5VjT2.jpeg,134.0,Jason Corkill,jasoncorkill,287919306604058,"[{'type': 'text', 'value': ""Benchmarking Google's Veo2: How Does It Compare?"", 'raw': ""Benchmarking Google's Veo2: How Does It Compare?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The results did not meet expectations. 
Veo2 struggled with style consistency and temporal coherence, falling behind competitors like Runway, Pika, Tencent, and even Alibaba. While the model shows promise, its alignment and quality are not yet there.', 'raw': 'The results did not meet expectations. Veo2 struggled with style consistency and temporal coherence, falling behind competitors like Runway, Pika, Tencent, and even Alibaba. While the model shows promise, its alignment and quality are not yet there.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Google recently launched Veo2, its latest text-to-video model, through select partners like fal.ai. As part of our ongoing evaluation of state-of-the-art generative video models, we rigorously benchmarked Veo2 against industry leaders.', 'raw': 'Google recently launched Veo2, its latest text-to-video model, through select partners like fal.ai. As part of our ongoing evaluation of state-of-the-art generative video models, we rigorously benchmarked Veo2 against industry leaders.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We generated a large set of Veo2 videos spending hundreds of dollars in the process and systematically evaluated them using our Python-based API for human and automated labeling.', 'raw': 'We generated a large set of Veo2 videos spending hundreds of dollars in the process and systematically evaluated them using our Python-based API for human and automated labeling.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the ranking here: ', 'raw': 'Check out the ranking here: '}, {'type': 'link', 'href': 'https://www.rapidata.ai/leaderboard/video-models', 'raw': 'https://www.rapidata.ai/leaderboard/video-models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Rapidata/text-2-video-human-preferences-veo2'}, 'url': 'https://huggingface.co/datasets/Rapidata/text-2-video-human-preferences-veo2', 'raw': 'https://huggingface.co/datasets/Rapidata/text-2-video-human-preferences-veo2'}, {'type': 'new_line', 'raw': '\n'}]","Benchmarking Google's Veo2: How Does It Compare? + +The results did not meet expectations. Veo2 struggled with style consistency and temporal coherence, falling behind competitors like Runway, Pika, Tencent, and even Alibaba. While the model shows promise, its alignment and quality are not yet there. + +Google recently launched Veo2, its latest text-to-video model, through select partners like fal.ai. As part of our ongoing evaluation of state-of-the-art generative video models, we rigorously benchmarked Veo2 against industry leaders. + +We generated a large set of Veo2 videos spending hundreds of dollars in the process and systematically evaluated them using our Python-based API for human and automated labeling. 
+ +Check out the ranking here: https://www.rapidata.ai/leaderboard/video-models + +https://huggingface.co/datasets/Rapidata/text-2-video-human-preferences-veo2 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66f5624c42b853e73e0738eb/H256Bw0vdQtDcLeu07VHZ.png'}]",[],"[{'reaction': '👀', 'users': ['jasoncorkill', 'canwiper', 'maalber', 'LucStr', 'Kchanger', 'LinoGiger', 'BrigitteTousi', 'AtAndDev', 'dantezxcd'], 'count': 9}, {'reaction': '🔥', 'users': ['jparavicini', 'LucStr', 'Sneccello', 'Kchanger', 'E0R', 'John6666', 'AtAndDev', 'dantezxcd'], 'count': 8}, {'reaction': '👍', 'users': ['tmanuel', 'dantezxcd'], 'count': 2}]",2025-03-12 10:13:39,2025-03-12 10:16:18.574,[],/posts/jasoncorkill/287919306604058,2366,"{'language': 'en', 'probability': 0.8564414381980896}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/M8ln8L-HFXbpO3vn-_1xn.png,13.0,Pranjali Joshi,PranjaliJoshi,972536658562466,"[{'type': 'text', 'value': '🌍 Have you tried Cosmos world foundation models on Hugging Face? Because more updates are coming! 🚀', 'raw': '🌍 Have you tried Cosmos world foundation models on Hugging Face? Because more updates are coming! 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cosmos world foundation models (WFMs) are generative pretrained models for synthetic data generation for training AI models for robot or autonomous vehicle development.', 'raw': 'Cosmos world foundation models (WFMs) are generative pretrained models for synthetic data generation for training AI models for robot or autonomous vehicle development.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ If you are building generative VLMs or foundation models for physical AI like policy models- there are new updates coming at NVIDIA GTC.', 'raw': '🛠️ If you are building generative VLMs or foundation models for physical AI like policy models- there are new updates coming at NVIDIA GTC.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GTC is NVIDIA’s biggest annual event (March 17-21) - it will have deep dives, training labs, and researcher-led sessions on Cosmos. ', 'raw': 'GTC is NVIDIA’s biggest annual event (March 17-21) - it will have deep dives, training labs, and researcher-led sessions on Cosmos. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Plus, Jensen Huang’s keynote! 🎤 ', 'raw': 'Plus, Jensen Huang’s keynote! 🎤 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎟️ 20% off GTC registration → Use code HUGGINGFACE20 ', 'raw': '🎟️ 20% off GTC registration → Use code HUGGINGFACE20 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 ', 'raw': '🔗 '}, {'type': 'link', 'href': 'https://www.nvidia.com/gtc/', 'raw': 'https://www.nvidia.com/gtc/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📍 Happening in person at the San Jose Convention Center and online. ', 'raw': '📍 Happening in person at the San Jose Convention Center and online. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explore all Cosmos sessions at GTC: ', 'raw': 'Explore all Cosmos sessions at GTC: '}, {'type': 'link', 'href': 'https://nvda.ws/41yBkmY', 'raw': 'https://nvda.ws/41yBkmY'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Try the existing Cosmos WFMs: ', 'raw': ' Try the existing Cosmos WFMs: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Hugging Face models: ', 'raw': '🔗 Hugging Face models: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'nvidia/cosmos-6751e884dc10e013a0a0d8e6'}, 'url': 'https://huggingface.co/collections/nvidia/cosmos-6751e884dc10e013a0a0d8e6', 'raw': 'https://huggingface.co/collections/nvidia/cosmos-6751e884dc10e013a0a0d8e6'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ Post-training scripts: ', 'raw': '🛠️ Post-training scripts: '}, {'type': 'link', 'href': 'https://github.com/NVIDIA/Cosmos/blob/main/cosmos1/models/POST_TRAINING.md', 'raw': 'https://github.com/NVIDIA/Cosmos/blob/main/cosmos1/models/POST_TRAINING.md'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","🌍 Have you tried Cosmos world foundation models on Hugging Face? Because more updates are coming! 🚀 + +Cosmos world foundation models (WFMs) are generative pretrained models for synthetic data generation for training AI models for robot or autonomous vehicle development. + +🛠️ If you are building generative VLMs or foundation models for physical AI like policy models- there are new updates coming at NVIDIA GTC. + +GTC is NVIDIA’s biggest annual event (March 17-21) - it will have deep dives, training labs, and researcher-led sessions on Cosmos. + +Plus, Jensen Huang’s keynote! 🎤 + +🎟️ 20% off GTC registration �� Use code HUGGINGFACE20 +🔗 https://www.nvidia.com/gtc/ +📍 Happening in person at the San Jose Convention Center and online. +Explore all Cosmos sessions at GTC: https://nvda.ws/41yBkmY + + Try the existing Cosmos WFMs: + +🔗 Hugging Face models: https://huggingface.co/collections/nvidia/cosmos-6751e884dc10e013a0a0d8e6 + +🛠️ Post-training scripts: https://github.com/NVIDIA/Cosmos/blob/main/cosmos1/models/POST_TRAINING.md +",[],[],"[{'reaction': '👀', 'users': ['John6666', 'reach-vb', 'dantezxcd'], 'count': 3}, {'reaction': '❤️', 'users': ['reach-vb', 'dantezxcd'], 'count': 2}]",2025-03-07 18:14:02,2025-03-07 19:00:34.506,"[{'_id': '677c471f8b4c4e271e57eaa5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/M8ln8L-HFXbpO3vn-_1xn.png', 'fullname': 'Pranjali Joshi', 'name': 'PranjaliJoshi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 13, 'isFollowing': False}]",/posts/PranjaliJoshi/972536658562466,696,"{'language': 'en', 'probability': 0.7761051654815674}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/66c8dc99951843ca6762fe02/yagkY9dY7_-qw8hAAPWiK.png,93.0,Rebekah Bogdanoff,DualityAI-RebekahBogdanoff,233802872800595,"[{'type': 'text', 'value': '🚀 Duality is super excited to announce that our Kaggle competition is LIVE! Synthetic-to-Real Object Detection Challenge is LIVE! 🚦 ', 'raw': '🚀 Duality is super excited to announce that our Kaggle competition is LIVE! Synthetic-to-Real Object Detection Challenge is LIVE! 
🚦 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Want to master AI training, learn industry-proven synthetic data workflows, and compete for public recognition and cash prizes? ', 'raw': 'Want to master AI training, learn industry-proven synthetic data workflows, and compete for public recognition and cash prizes? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Join our Synthetic-to-Real Object Detection Challenge on Kaggle! ', 'raw': '👉 Join our Synthetic-to-Real Object Detection Challenge on Kaggle! '}, {'type': 'link', 'href': 'https://www.kaggle.com/competitions/synthetic-2-real-object-detection-challenge/overview', 'raw': 'https://www.kaggle.com/competitions/synthetic-2-real-object-detection-challenge/overview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Compete to build the top-performing model capable of detecting real-world objects—trained entirely on synthetic data. Master these industry-proven methods for faster, more targeted, and diverse dataset creation, and set yourself apart, unlocking today's most exciting AI opportunities. "", 'raw': ""Compete to build the top-performing model capable of detecting real-world objects—trained entirely on synthetic data. Master these industry-proven methods for faster, more targeted, and diverse dataset creation, and set yourself apart, unlocking today's most exciting AI opportunities. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ready to test your skills?', 'raw': 'Ready to test your skills?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏆 The Challenge', 'raw': '🏆 The Challenge'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Train an object detection model using synthetic images created with Falcon—Duality AI's cutting-edge digital twin simulation software—then evaluate your model on real-world imagery."", 'raw': ""Train an object detection model using synthetic images created with Falcon—Duality AI's cutting-edge digital twin simulation software—then evaluate your model on real-world imagery.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Twist?', 'raw': 'The Twist?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📈 Boost your model’s accuracy by creating and refining your own custom synthetic datasets using Falcon! Get access to the tools and double the data by following this link and creating a free account- ', 'raw': '📈 Boost your model’s accuracy by creating and refining your own custom synthetic datasets using Falcon! 
Get access to the tools and double the data by following this link and creating a free account- '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://falcon.duality.ai/secure/documentation/ex-1-objdetection?sidebarMode=learn', 'raw': 'https://falcon.duality.ai/secure/documentation/ex-1-objdetection?sidebarMode=learn'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Win Cash Prizes & Recognition', 'raw': 'Win Cash Prizes & Recognition'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Earn cash and public shout-outs from the Duality AI accounts ', 'raw': '🔹 Earn cash and public shout-outs from the Duality AI accounts '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enhance Your Portfolio', 'raw': 'Enhance Your Portfolio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Demonstrate your real-world AI and ML expertise in object detection to prospective employers and collaborators.', 'raw': '🔹 Demonstrate your real-world AI and ML expertise in object detection to prospective employers and collaborators.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Expand Your Network', 'raw': 'Expand Your Network'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Engage, compete, and collaborate with fellow ML engineers, researchers, and students.', 'raw': '🔹 Engage, compete, and collaborate with fellow ML engineers, researchers, and students.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Put your skills to the test and join our Kaggle competition today: ', 'raw': '🚀 Put your skills to the test and join our Kaggle competition today: '}, {'type': 'link', 'href': 'https://www.kaggle.com/competitions/synthetic-2-real-object-detection-challenge/overview', 'raw': 'https://www.kaggle.com/competitions/synthetic-2-real-object-detection-challenge/overview'}, {'type': 'new_line', 'raw': '\n'}]","🚀 Duality is super excited to announce that our Kaggle competition is LIVE! Synthetic-to-Real Object Detection Challenge is LIVE! 🚦 +Want to master AI training, learn industry-proven synthetic data workflows, and compete for public recognition and cash prizes? + +👉 Join our Synthetic-to-Real Object Detection Challenge on Kaggle! https://www.kaggle.com/competitions/synthetic-2-real-object-detection-challenge/overview + +Compete to build the top-performing model capable of detecting real-world objects—trained entirely on synthetic data. Master these industry-proven methods for faster, more targeted, and diverse dataset creation, and set yourself apart, unlocking today's most exciting AI opportunities. + +Ready to test your skills? + +🏆 The Challenge + +Train an object detection model using synthetic images created with Falcon—Duality AI's cutting-edge digital twin simulation software—then evaluate your model on real-world imagery. + +The Twist? +📈 Boost your model’s accuracy by creating and refining your own custom synthetic datasets using Falcon! Get access to the tools and double the data by following this link and creating a free account- +https://falcon.duality.ai/secure/documentation/ex-1-objdetection?sidebarMode=learn + +Win Cash Prizes & Recognition +🔹 Earn cash and public shout-outs from the Duality AI accounts +Enhance Your Portfolio +🔹 Demonstrate your real-world AI and ML expertise in object detection to prospective employers and collaborators. 
+Expand Your Network +🔹 Engage, compete, and collaborate with fellow ML engineers, researchers, and students. + +🚀 Put your skills to the test and join our Kaggle competition today: https://www.kaggle.com/competitions/synthetic-2-real-object-detection-challenge/overview +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66c8dc99951843ca6762fe02/JO73VYJ3AfI6xK-5FmTmu.png'}]",[],"[{'reaction': '🚀', 'users': ['Gandharv', 'MMSDuality', 'John6666', 'not-lain', 'DeathGodlike', 'Mike-DualityAI', 'IamValeAI'], 'count': 7}, {'reaction': '🔥', 'users': ['MMSDuality', 'DualityAI-RebekahBogdanoff', 'gsantopaolo', 'dantezxcd'], 'count': 4}, {'reaction': '❤️', 'users': ['MMSDuality', 'clippinglab071', 'Mike-DualityAI', 'dantezxcd'], 'count': 4}, {'reaction': '🧠', 'users': ['MMSDuality'], 'count': 1}, {'reaction': '😎', 'users': ['DualityAI-RebekahBogdanoff'], 'count': 1}, {'reaction': '🤯', 'users': ['DualityAI-RebekahBogdanoff'], 'count': 1}, {'reaction': '🤗', 'users': ['DualityAI-RebekahBogdanoff'], 'count': 1}]",2025-03-07 18:05:58,2025-03-07 18:28:42.114,[],/posts/DualityAI-RebekahBogdanoff/233802872800595,2855,"{'language': 'en', 'probability': 0.8629403114318848}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d594d99621f3f1b14d776d/ntGadxUU7_iYsF09pNbcX.jpeg,26.0,Shukdev Datta,shukdevdatta123,793510866829260,"[{'type': 'text', 'value': 'Introducing: Multi Modal Omni Chatbot', 'raw': 'Introducing: Multi Modal Omni Chatbot'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Exciting News! 🤖💡', 'raw': '🚀 Exciting News! 🤖💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I just built a Multimodal Chatbot that can understand both Text and Images! 🌐🖼️', 'raw': 'I just built a Multimodal Chatbot that can understand both Text and Images! 🌐🖼️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎤 Features:', 'raw': '🎤 Features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Text Input: Ask any question or provide text, and get smart answers! 🧠✨', 'raw': 'Text Input: Ask any question or provide text, and get smart answers! 🧠✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Image Input: Upload an image and let the assistant help you with it! 🤳🧐', 'raw': 'Image Input: Upload an image and let the assistant help you with it! 🤳🧐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Customizable: Choose the level of reasoning effort you want! 🧩', 'raw': 'Customizable: Choose the level of reasoning effort you want! 🧩'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Choose Your Model: Switch between image-focused and text-focused models! 🔄', 'raw': 'Choose Your Model: Switch between image-focused and text-focused models! 
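For a sense of the train-on-synthetic, evaluate-on-real loop the challenge is built around, here is a minimal sketch using Ultralytics YOLO; the dataset YAML files and hyperparameters are illustrative placeholders, not part of the official competition kit:

```python
from ultralytics import YOLO

# Fine-tune a small pretrained detector on Falcon-generated synthetic images.
model = YOLO("yolov8n.pt")
model.train(data="falcon_synthetic.yaml", epochs=50, imgsz=640)  # hypothetical dataset config

# The competition twist: score the model on real-world imagery to expose
# the synthetic-to-real gap.
metrics = model.val(data="real_world.yaml")  # hypothetical real-image config
print(metrics.box.map)  # mAP50-95 on the real validation set
```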
🔄'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔑 How It Works:', 'raw': '🔑 How It Works:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enter your OpenAI API Key 🔑', 'raw': 'Enter your OpenAI API Key 🔑'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Type a Text Question or Upload an Image 📸', 'raw': 'Type a Text Question or Upload an Image 📸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Get intelligent responses from the assistant 🗣️💬', 'raw': 'Get intelligent responses from the assistant 🗣️💬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Plus, you can clear your chat history whenever you want! 🧹✨', 'raw': 'Plus, you can clear your chat history whenever you want! 🧹✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Perfect for all your multimodal needs, whether you're working with text or visuals! Try it out and explore how AI can be your new assistant! 💬🤖"", 'raw': ""Perfect for all your multimodal needs, whether you're working with text or visuals! Try it out and explore how AI can be your new assistant! 💬🤖""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'link', 'href': 'https://shukdevdatta123-multi-modal-o1-chatbot.hf.space', 'raw': 'https://shukdevdatta123-multi-modal-o1-chatbot.hf.space'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #OpenAI #Chatbot #MachineLearning #TechInnovation #Multimodal #Coding #Python #Gradio #AIForAll', 'raw': '#AI #OpenAI #Chatbot #MachineLearning #TechInnovation #Multimodal #Coding #Python #Gradio #AIForAll'}]","Introducing: Multi Modal Omni Chatbot + +🚀 Exciting News! 🤖💡 +I just built a Multimodal Chatbot that can understand both Text and Images! 🌐🖼️ + +🎤 Features: +Text Input: Ask any question or provide text, and get smart answers! 🧠✨ +Image Input: Upload an image and let the assistant help you with it! 🤳🧐 +Customizable: Choose the level of reasoning effort you want! 🧩 +Choose Your Model: Switch between image-focused and text-focused models! 🔄 + +🔑 How It Works: +Enter your OpenAI API Key 🔑 +Type a Text Question or Upload an Image 📸 +Get intelligent responses from the assistant 🗣️💬 +Plus, you can clear your chat history whenever you want! 🧹✨ +Perfect for all your multimodal needs, whether you're working with text or visuals! Try it out and explore how AI can be your new assistant! 💬🤖 + +Demo: https://shukdevdatta123-multi-modal-o1-chatbot.hf.space + +#AI #OpenAI #Chatbot #MachineLearning #TechInnovation #Multimodal #Coding #Python #Gradio #AIForAll",[],[],"[{'reaction': '👍', 'users': ['John6666', 'clippinglab071', 'dantezxcd'], 'count': 3}]",2025-03-07 17:48:51,2025-04-19 14:31:23.251,"[{'_id': '679cbe93644082bcceb764d1', 'avatarUrl': '/avatars/469b38ddf8f0536fa7273fd9aed11d99.svg', 'fullname': 'Black Subas', 'name': 'blacksubas', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/shukdevdatta123/793510866829260,1347,"{'language': 'en', 'probability': 0.8077228665351868}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1606406298765-noauth.jpeg,368.0,Albert Villanova del Moral,albertvillanova,159056887588114,"[{'type': 'text', 'value': '🚀 New smolagents update: Safer Local Python Execution! 🦾🐍', 'raw': '🚀 New smolagents update: Safer Local Python Execution! 
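The heart of a chatbot like this is a single multimodal chat-completions call; a minimal sketch with the OpenAI Python SDK, where the model name and image URL are placeholders (the app itself lets you switch between image- and text-focused models):

```python
from openai import OpenAI

client = OpenAI(api_key="sk-...")  # user-supplied key, as in the app

response = client.chat.completions.create(
    model="gpt-4o",  # illustrative choice; swap for your preferred model
    messages=[{
        "role": "user",
        "content": [
            {"type": "text", "text": "What is shown in this image?"},
            {"type": "image_url", "image_url": {"url": "https://example.com/photo.jpg"}},
        ],
    }],
)
print(response.choices[0].message.content)
```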
🦾🐍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""With the latest release, we've added security checks to the local Python interpreter: every evaluation is now analyzed for dangerous builtins, modules, and functions. 🔒"", 'raw': ""With the latest release, we've added security checks to the local Python interpreter: every evaluation is now analyzed for dangerous builtins, modules, and functions. 🔒""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's why this matters & what you need to know! 🧵👇"", 'raw': ""Here's why this matters & what you need to know! 🧵👇""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ Why is local execution risky? ⚠️', 'raw': '1️⃣ Why is local execution risky? ⚠️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI agents that run arbitrary Python code can unintentionally (or maliciously) access system files, run unsafe commands, or exfiltrate data.', 'raw': 'AI agents that run arbitrary Python code can unintentionally (or maliciously) access system files, run unsafe commands, or exfiltrate data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ New Safety Layer in smolagents 🛡️', 'raw': '2️⃣ New Safety Layer in smolagents 🛡️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We now inspect every return value during execution:', 'raw': 'We now inspect every return value during execution:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Allowed: Safe built-in types (e.g., numbers, strings, lists)', 'raw': '✅ Allowed: Safe built-in types (e.g., numbers, strings, lists)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⛔ Blocked: Dangerous functions/modules (e.g., os.system, subprocess, exec, shutil)', 'raw': '⛔ Blocked: Dangerous functions/modules (e.g., os.system, subprocess, exec, shutil)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Immediate Benefits 💡', 'raw': '3️⃣ Immediate Benefits 💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Prevent agents from accessing unsafe builtins', 'raw': ' - Prevent agents from accessing unsafe builtins'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Block unauthorized file or network access', 'raw': ' - Block unauthorized file or network access'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Reduce accidental security vulnerabilities', 'raw': ' - Reduce accidental security vulnerabilities'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4️⃣ Security Disclaimer ⚠️', 'raw': '4️⃣ Security Disclaimer ⚠️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚨 Despite these improvements, local Python execution is NEVER 100% safe. 🚨', 'raw': '🚨 Despite these improvements, local Python execution is NEVER 100% safe. 
🚨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you need true isolation, use a remote sandboxed executor like Docker or E2B.', 'raw': 'If you need true isolation, use a remote sandboxed executor like Docker or E2B.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5️⃣ The Best Practice: Use Sandboxed Execution 🔐', 'raw': '5️⃣ The Best Practice: Use Sandboxed Execution 🔐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For production-grade AI agents, we strongly recommend running code in a Docker or E2B sandbox to ensure complete isolation.', 'raw': 'For production-grade AI agents, we strongly recommend running code in a Docker or E2B sandbox to ensure complete isolation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6️⃣ Upgrade Now & Stay Safe! 🚀', 'raw': '6️⃣ Upgrade Now & Stay Safe! 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the latest smolagents release and start building safer AI agents today.', 'raw': 'Check out the latest smolagents release and start building safer AI agents today.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 ', 'raw': '🔗 '}, {'type': 'link', 'href': 'https://github.com/huggingface/smolagents', 'raw': 'https://github.com/huggingface/smolagents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What security measures do you take when running AI-generated code? Let’s discuss! 👇', 'raw': 'What security measures do you take when running AI-generated code? Let’s discuss! 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #smolagents #Python #Security', 'raw': '#AI #smolagents #Python #Security'}]","🚀 New smolagents update: Safer Local Python Execution! 🦾🐍 + +With the latest release, we've added security checks to the local Python interpreter: every evaluation is now analyzed for dangerous builtins, modules, and functions. 🔒 + +Here's why this matters & what you need to know! 🧵👇 + +1️⃣ Why is local execution risky? ⚠️ +AI agents that run arbitrary Python code can unintentionally (or maliciously) access system files, run unsafe commands, or exfiltrate data. + +2️⃣ New Safety Layer in smolagents 🛡️ +We now inspect every return value during execution: +✅ Allowed: Safe built-in types (e.g., numbers, strings, lists) +⛔ Blocked: Dangerous functions/modules (e.g., os.system, subprocess, exec, shutil) + +3️⃣ Immediate Benefits 💡 + - Prevent agents from accessing unsafe builtins + - Block unauthorized file or network access + - Reduce accidental security vulnerabilities + +4️⃣ Security Disclaimer ⚠️ +🚨 Despite these improvements, local Python execution is NEVER 100% safe. 🚨 +If you need true isolation, use a remote sandboxed executor like Docker or E2B. + +5️⃣ The Best Practice: Use Sandboxed Execution 🔐 +For production-grade AI agents, we strongly recommend running code in a Docker or E2B sandbox to ensure complete isolation. + +6️⃣ Upgrade Now & Stay Safe! 🚀 +Check out the latest smolagents release and start building safer AI agents today. + +🔗 https://github.com/huggingface/smolagents + +What security measures do you take when running AI-generated code? Let’s discuss! 
👇 + +#AI #smolagents #Python #Security","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fbfd09ee366524fe8e97cd3/yesRVTsiGOgtoU0nvSYwn.png'}]",[],"[{'reaction': '👍', 'users': ['sainathm', 'John6666', 'wsuff', 'bstagbrook', 'Fishtiks', 'ashbeekim', 'Tonic', 'mkurman', 'benhaotang', 'clippinglab071', 'dantezxcd'], 'count': 11}, {'reaction': '🤗', 'users': ['John6666', 'alvesrt', 'Svngoku', 'dantezxcd'], 'count': 4}, {'reaction': '😎', 'users': ['John6666', 'Fishtiks'], 'count': 2}, {'reaction': '👀', 'users': ['Svngoku'], 'count': 1}]",2025-03-07 11:01:21,2025-03-08 20:44:58.500,"[{'_id': '67cb1fb215882735a4d9cb4a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67cb1fb215882735a4d9cb4a/M046M7KfqfiqByIi9dcCH.jpeg', 'fullname': 'carsicko', 'name': 'carsicko', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '67ccac16de420ae77b80e394', 'avatarUrl': '/avatars/b992b4169f56608f816a24fdcb13e1b2.svg', 'fullname': 'insta', 'name': 'instapross', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/albertvillanova/159056887588114,4131,"{'language': 'en', 'probability': 0.7913345694541931}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6758312797bb6949e67d4a47/bBAysCFgr32JZu4j_Ry1J.jpeg,18.0,daavoo,daavoo,348063789486715,"[{'type': 'text', 'value': 'Hi there 👋! Check this project for mapping features in OpenStreetMap with Computer Vision:', 'raw': 'Hi there 👋! Check this project for mapping features in OpenStreetMap with Computer Vision:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⭐-> ', 'raw': '⭐-> '}, {'type': 'link', 'href': 'https://github.com/mozilla-ai/osm-ai-helper', 'raw': 'https://github.com/mozilla-ai/osm-ai-helper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And a live demo showing how to map new swimming pools 🏊:', 'raw': 'And a live demo showing how to map new swimming pools 🏊:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗺️ -> ', 'raw': '🗺️ -> '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'mozilla-ai/osm-ai-helper'}, 'url': 'https://huggingface.co/spaces/mozilla-ai/osm-ai-helper', 'raw': 'https://huggingface.co/spaces/mozilla-ai/osm-ai-helper'}]","Hi there 👋! 
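To see the smolagents allow-list behaviour in practice, a minimal sketch, assuming the API names current at the time of this release (`HfApiModel` and the `additional_authorized_imports` keyword; verify against the latest docs):

```python
from smolagents import CodeAgent, HfApiModel

# Only explicitly authorized imports are available to generated code; the
# local interpreter's safety checks reject dangerous builtins and modules
# such as os.system, subprocess, and exec before they can run.
agent = CodeAgent(
    tools=[],
    model=HfApiModel(),
    additional_authorized_imports=["math"],
)

agent.run("Compute the square root of 1764.")  # safe: needs math only
```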
Check this project for mapping features in OpenStreetMap with Computer Vision: + +⭐-> https://github.com/mozilla-ai/osm-ai-helper + +And a live demo showing how to map new swimming pools 🏊: + +🗺️ -> https://huggingface.co/spaces/mozilla-ai/osm-ai-helper",[],[],"[{'reaction': '❤️', 'users': ['Tonic', 'CreativeRiku', 'clippinglab071', 'atrisaxena', 'dantezxcd'], 'count': 5}, {'reaction': '👍', 'users': ['John6666', 'Tonic', 'not-lain', 'dantezxcd'], 'count': 4}]",2025-03-07 09:57:20,2025-03-07 09:59:26.363,[],/posts/daavoo/348063789486715,2064,"{'language': 'en', 'probability': 0.7002339959144592}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,504775154219041,"[{'type': 'text', 'value': '🙋🏻\u200d♂️Hey there folks,', 'raw': '🙋🏻\u200d♂️Hey there folks,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Did you know that you can use ModernBERT to detect model hallucinations ? ', 'raw': ' Did you know that you can use ModernBERT to detect model hallucinations ? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the Demo : ', 'raw': 'Check out the Demo : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Tonic/hallucination-test'}, 'url': 'https://huggingface.co/spaces/Tonic/hallucination-test', 'raw': 'https://huggingface.co/spaces/Tonic/hallucination-test'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'See here for Medical Context Demo : ', 'raw': 'See here for Medical Context Demo : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MultiTransformer/tonic-discharge-guard'}, 'url': 'https://huggingface.co/spaces/MultiTransformer/tonic-discharge-guard', 'raw': 'https://huggingface.co/spaces/MultiTransformer/tonic-discharge-guard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'check out the model from KRLabs : ', 'raw': 'check out the model from KRLabs : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'KRLabsOrg/lettucedect-large-modernbert-en-v1'}, 'url': 'https://huggingface.co/KRLabsOrg/lettucedect-large-modernbert-en-v1', 'raw': 'https://huggingface.co/KRLabsOrg/lettucedect-large-modernbert-en-v1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and the library they kindly open sourced for it : ', 'raw': 'and the library they kindly open sourced for it : '}, {'type': 'link', 'href': 'https://github.com/KRLabsOrg/LettuceDetect', 'raw': 'https://github.com/KRLabsOrg/LettuceDetect'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👆🏻if you like this topic please contribute code upstream 🚀', 'raw': '👆🏻if you like this topic please contribute code upstream 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","🙋🏻‍♂️Hey there folks, + + Did you know that you can use ModernBERT to detect model hallucinations ? 
+ +Check out the Demo : https://huggingface.co/spaces/Tonic/hallucination-test + +See here for Medical Context Demo : https://huggingface.co/spaces/MultiTransformer/tonic-discharge-guard + +check out the model from KRLabs : https://huggingface.co/KRLabsOrg/lettucedect-large-modernbert-en-v1 + +and the library they kindly open sourced for it : https://github.com/KRLabsOrg/LettuceDetect + +👆🏻if you like this topic please contribute code upstream 🚀 + +",[],[],"[{'reaction': '👍', 'users': ['John6666', 'clippinglab071', 'dantezxcd'], 'count': 3}]",2025-03-07 09:21:24,2025-03-07 17:27:25.339,"[{'_id': '67cadbe623e26dc60e41a565', 'avatarUrl': '/avatars/38aaf6ffbf856eec2a54ee49ddceecfd.svg', 'fullname': 'IGBOKWE FRANK CHIKA', 'name': 'FrankZulu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '62a3bb1cd0d8c2c2169f0b88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg', 'fullname': 'Joseph [open/acc] Pollack', 'name': 'Tonic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 415, 'isFollowing': False}]",/posts/Tonic/504775154219041,1603,"{'language': 'en', 'probability': 0.6957852840423584}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg,107.0,Sami Halawa,samihalawa,628675328401017,"[{'type': 'text', 'value': 'Thinking models are NOT intended when instructions must be somehow specific. Don’t use shit “cause” is cool, I STILL FEEL SCAMMED when they say this is a new model. The REAL ‘Chain of Thoughts’ must be prompting done correctly. ', 'raw': 'Thinking models are NOT intended when instructions must be somehow specific. Don’t use shit “cause” is cool, I STILL FEEL SCAMMED when they say this is a new model. The REAL ‘Chain of Thoughts’ must be prompting done correctly. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I can think that if I tell gpt-4o “', 'raw': 'I can think that if I tell gpt-4o “'}]","Thinking models are NOT intended when instructions must be somehow specific. Don’t use shit “cause” is cool, I STILL FEEL SCAMMED when they say this is a new model. The REAL ‘Chain of Thoughts’ must be prompting done correctly.
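A minimal sketch of span-level hallucination detection with the KRLabs model above, following the usage shown in the LettuceDetect README (class and argument names are taken from that README and should be verified against the repo):

```python
from lettucedetect.models.inference import HallucinationDetector

detector = HallucinationDetector(
    method="transformer",
    model_path="KRLabsOrg/lettucedect-large-modernbert-en-v1",
)

context = ["The Eiffel Tower is 330 metres tall and is located in Paris."]
question = "How tall is the Eiffel Tower?"
answer = "The Eiffel Tower is 450 metres tall."

# Returns character spans in the answer that the context does not support.
spans = detector.predict(context=context, question=question,
                         answer=answer, output_format="spans")
print(spans)
```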
+
I can think that if I tell gpt-4o “",[],[],"[{'reaction': '🔥', 'users': ['dantezxcd'], 'count': 1}]",2025-03-07 02:19:04,2025-03-07 10:02:45.926,"[{'_id': '67c5774b307ece753808f512', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67c5774b307ece753808f512/VEuyFdsrUFUTO8LODdEw2.png', 'fullname': 'Christian Martin', 'name': 'crismart242', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/samihalawa/628675328401017,763,"{'language': 'en', 'probability': 0.8741028308868408}",1 +/avatars/fb866e3758189d70488fc6a879151f45.svg,21.0,Akihito Miyazaki,Akjava,598094995483343,"[{'type': 'text', 'value': 'A dataset of 50 instrumental music tracks generated with the DiffRhythm model, using 10 CC0-licensed instrument samples from Open Game Art.', 'raw': 'A dataset of 50 instrumental music tracks generated with the DiffRhythm model, using 10 CC0-licensed instrument samples from Open Game Art.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Akjava/diffrhythm-instrument-cc0-oepngamearg-10x5-generated'}, 'url': 'https://huggingface.co/datasets/Akjava/diffrhythm-instrument-cc0-oepngamearg-10x5-generated', 'raw': 'https://huggingface.co/datasets/Akjava/diffrhythm-instrument-cc0-oepngamearg-10x5-generated'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've released the dataset. It's a little skewed towards certain types of music. It might be interesting for people curious about the range of variations it can generate. It could also be a good starting point for experimenting with the distilled model. I believe the quality is good enough to be used as background music for YouTube videos or probably as reference tracks for YuE or Udio."", 'raw': ""I've released the dataset. It's a little skewed towards certain types of music. It might be interesting for people curious about the range of variations it can generate. It could also be a good starting point for experimenting with the distilled model. I believe the quality is good enough to be used as background music for YouTube videos or probably as reference tracks for YuE or Udio.""}]","A dataset of 50 instrumental music tracks generated with the DiffRhythm model, using 10 CC0-licensed instrument samples from Open Game Art. +https://huggingface.co/datasets/Akjava/diffrhythm-instrument-cc0-oepngamearg-10x5-generated + +I've released the dataset. It's a little skewed towards certain types of music. It might be interesting for people curious about the range of variations it can generate. It could also be a good starting point for experimenting with the distilled model.
I believe the quality is good enough to be used as background music for YouTube videos or probably as reference tracks for YuE or Udio.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'clippinglab071', 'dantezxcd'], 'count': 3}]",2025-03-07 01:16:20,2025-03-07 01:16:20.830,[],/posts/Akjava/598094995483343,753,"{'language': 'en', 'probability': 0.90188068151474}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61c396584b91d658673447d0/UDexWgm4v9jnvxAeau3E3.jpeg,66.0,Mariusz Kurman,mkurman,317887630445280,"[{'type': 'text', 'value': 'MedIT One 140M Fifth checkpoint after 9B tokens', 'raw': 'MedIT One 140M Fifth checkpoint after 9B tokens'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'meditsolutions/medit-one-140M-9B-tokens-checkpoint'}, 'url': 'https://huggingface.co/meditsolutions/medit-one-140M-9B-tokens-checkpoint', 'raw': 'https://huggingface.co/meditsolutions/medit-one-140M-9B-tokens-checkpoint'}]","MedIT One 140M Fifth checkpoint after 9B tokens +https://huggingface.co/meditsolutions/medit-one-140M-9B-tokens-checkpoint",[],[],"[{'reaction': '👍', 'users': ['John6666', 'JLouisBiz', 'bndp', 'clippinglab071', 'dantezxcd'], 'count': 5}]",2025-03-06 18:35:59,2025-03-06 18:35:59.591,[],/posts/mkurman/317887630445280,2406,"{'language': 'en', 'probability': 0.6517626047134399}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/658f24cb35c41262d64af1a2/vUK_c6K821tq61AxSeV4i.png,41.0,Ed Addario,eaddario,795208057053852,"[{'type': 'text', 'value': 'Squeezing out tensor bits, part III and final (for now 😉)', 'raw': 'Squeezing out tensor bits, part III and final (for now 😉)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(For context please see: ', 'raw': '(For context please see: '}, {'type': 'link', 'href': 'https://huggingface.co/posts/eaddario/832567461491467', 'raw': 'https://huggingface.co/posts/eaddario/832567461491467'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have just finished uploading ', 'raw': 'I have just finished uploading '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'eaddario/Hammer2.1-7b-GGUF'}, 'url': 'https://huggingface.co/eaddario/Hammer2.1-7b-GGUF', 'raw': 'https://huggingface.co/eaddario/Hammer2.1-7b-GGUF'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'eaddario/Dolphin3.0-Mistral-24B-GGUF'}, 'url': 'https://huggingface.co/eaddario/Dolphin3.0-Mistral-24B-GGUF', 'raw': 'https://huggingface.co/eaddario/Dolphin3.0-Mistral-24B-GGUF'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'While I was able to get 7+% reduction with Hammer2.1-7b, the larger Dolphin3.0-Mistral-24B proved to be a more difficult nut to crack (only 3%).', 'raw': 'While I was able to get 7+% reduction with Hammer2.1-7b, the larger Dolphin3.0-Mistral-24B proved to be a more difficult nut to crack (only 3%).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I have an idea as to why this was the case, which I'll test with QwQ-32B, but it will be a while before I can find the time."", 'raw': ""I have an idea as to why this 
was the case, which I'll test with QwQ-32B, but it will be a while before I can find the time.""}]","Squeezing out tensor bits, part III and final (for now 😉) + +(For context please see: https://huggingface.co/posts/eaddario/832567461491467) + +I have just finished uploading https://huggingface.co/eaddario/Hammer2.1-7b-GGUF and https://huggingface.co/eaddario/Dolphin3.0-Mistral-24B-GGUF. + +While I was able to get 7+% reduction with Hammer2.1-7b, the larger Dolphin3.0-Mistral-24B proved to be a more difficult nut to crack (only 3%). + +I have an idea as to why this was the case, which I'll test with QwQ-32B, but it will be a while before I can find the time.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'clippinglab071', 'dantezxcd'], 'count': 3}]",2025-03-06 18:29:34,2025-03-06 18:29:34.105,[],/posts/eaddario/795208057053852,760,"{'language': 'en', 'probability': 0.9415120482444763}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,117676973080970,"[{'type': 'text', 'value': 'I was puzzled by the scope of 🐋DeepSeek🐋 projects, i.e. why they built (then open sourced) so many pieces which are all over their technology stack. Good engineers are minimalists. They build only when they have to. ', 'raw': 'I was puzzled by the scope of 🐋DeepSeek🐋 projects, i.e. why they built (then open sourced) so many pieces which are all over their technology stack. Good engineers are minimalists. They build only when they have to. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then I realized that FP8 should be the main driving force here. So your raw inter-GPU bandwidth is cut in half (H800). But if you compress your data representation from 16 bits to 8 bits, then the effective throughput of your workload stays unchanged! ', 'raw': 'Then I realized that FP8 should be the main driving force here. So your raw inter-GPU bandwidth is cut in half (H800). But if you compress your data representation from 16 bits to 8 bits, then the effective throughput of your workload stays unchanged! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The idea is simple but lots of work had to be done. Their v3 technical report will give you a holistic view (better than reading the code). To summarize, data structure is the foundation to any software. Since FP8 was new and untried, the ecosystem wasn't there. So DeepSeek became the trailblazer. Before cooking your meals, you need to till the land, grow crops, and grind the flour 😅"", 'raw': ""The idea is simple but lots of work had to be done. Their v3 technical report will give you a holistic view (better than reading the code). To summarize, data structure is the foundation to any software. Since FP8 was new and untried, the ecosystem wasn't there. So DeepSeek became the trailblazer. Before cooking your meals, you need to till the land, grow crops, and grind the flour 😅""}, {'type': 'new_line', 'raw': '\n'}]","I was puzzled by the scope of 🐋DeepSeek🐋 projects, i.e. why they built (then open sourced) so many pieces which are all over their technology stack. Good engineers are minimalists. They build only when they have to. + +Then I realized that FP8 should be the main driving force here. So your raw inter-GPU bandwidth is cut in half (H800).
But if you compress your data representation from 16 bits to 8 bits, then the effective throughput of your workload stays unchanged! + +The idea is simple but lots of work had to be done. Their v3 technical report will give you a holistic view (better than reading the code). To summarize, data structure is the foundation to any software. Since FP8 was new and untried, the ecosystem wasn't there. So DeepSeek became the trailblazer. Before cooking your meals, you need to till the land, grow crops, and grind the flour 😅 +",[],[],"[{'reaction': '🚀', 'users': ['John6666', 'victor', 'wsuff', 'Makar7', 'abidlabs', 'dantezxcd'], 'count': 6}]",2025-03-04 05:02:22,2025-03-04 21:17:43.755,[],/posts/onekq/117676973080970,2531,"{'language': 'en', 'probability': 0.964727520942688}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1584020801691-noauth.jpeg,2805.0,Stefan Schweter,stefan-it,308944241065066,"[{'type': 'text', 'value': '🇹🇷 😍 I\'m very happy to finally announce my new Turkish LM called ""BERT5urk"":', 'raw': '🇹🇷 😍 I\'m very happy to finally announce my new Turkish LM called ""BERT5urk"":'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'stefan-it/bert5urk'}, 'url': 'https://huggingface.co/stefan-it/bert5urk', 'raw': 'https://huggingface.co/stefan-it/bert5urk'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It is a 1.42B T5-based model, trained with the UL2 pretraining objective on the Turkish part of the awesome ', 'raw': 'It is a 1.42B T5-based model, trained with the UL2 pretraining objective on the Turkish part of the awesome '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HuggingFaceFW/fineweb-2'}, 'url': 'https://huggingface.co/datasets/HuggingFaceFW/fineweb-2', 'raw': 'https://huggingface.co/datasets/HuggingFaceFW/fineweb-2'}, {'type': 'text', 'value': ' dataset.', 'raw': ' dataset.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Feel free to check it out!', 'raw': 'Feel free to check it out!'}]","🇹🇷 😍 I'm very happy to finally announce my new Turkish LM called ""BERT5urk"": + +https://huggingface.co/stefan-it/bert5urk + +It is a 1.42B T5-based model, trained with the UL2 pretraining objective on the Turkish part of the awesome https://huggingface.co/datasets/HuggingFaceFW/fineweb-2 dataset. + +Feel free to check it out!",[],"[{'reaction': '👀', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-03-03 23:41:50,2025-03-04 02:11:00.887,"[{'_id': '5e6a3d4ea9afd5125d9ec064', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1584020801691-noauth.jpeg', 'fullname': 'Stefan Schweter', 'name': 'stefan-it', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2805, 'isFollowing': False}]",/posts/stefan-it/308944241065066,1007,"{'language': 'en', 'probability': 0.8543182015419006}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg,107.0,Sami Halawa,samihalawa,685788730899562,"[{'type': 'text', 'value': '🥳🥳Just achieved 25m 59s of research with plain ChatGPT 🔥 Had it doing a complete internet search in just ONE call visiting 443 websites!
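The bandwidth arithmetic behind the FP8 point above is easy to verify; a minimal PyTorch sketch (assumes a build with float8 dtypes, roughly 2.1+):

```python
import torch

x16 = torch.randn(4096, 4096, dtype=torch.bfloat16)
x8 = x16.to(torch.float8_e4m3fn)  # 8-bit float cast

print(x16.element_size(), "->", x8.element_size())  # 2 bytes -> 1 byte per element
# Half the bytes per element means a link with half the raw bandwidth
# still moves the same number of tensor elements per second.
```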
Hard to beat huh!', 'raw': '🥳🥳Just achieved 25m 59s of research with plain ChatGPT 🔥 Had it doing a complete internet search in just ONE call visiting 443 websites! Hard to beat huh!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PROMPT IN COMMENTS ', 'raw': 'PROMPT IN COMMENTS '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the Massive Article created by the prompt: ', 'raw': 'Check out the Massive Article created by the prompt: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/luigi12345/automating-lead-generation-with-ai', 'raw': 'https://huggingface.co/blog/luigi12345/automating-lead-generation-with-ai'}]","🥳🥳Just achieved 25m 59s of research with plain ChatGPT 🔥 Had it doing a complete internet search in just ONE call visiting 443 websites! Hard to beat huh! +PROMPT IN COMMENTS +Check out the Massive Article created by the prompt: +https://huggingface.co/blog/luigi12345/automating-lead-generation-with-ai","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d00458fff501149572827f/diqTj9MdrOUxvggCTurQR.jpeg'}]",[],"[{'reaction': '👀', 'users': ['RedSparkie', 'John6666', 'Reltih14', 'alexandrospopov', 'dantezxcd'], 'count': 5}, {'reaction': '❤️', 'users': ['morongosteve', 'samihalawa', 'dantezxcd'], 'count': 3}, {'reaction': '🔥', 'users': ['DeathGodlike'], 'count': 1}]",2025-03-03 23:13:44,2025-03-07 02:03:44.313,"[{'_id': '677cb391ae2cce31b6b2b2d4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/zP2GtpjDU03OY9tcJEkvD.png', 'fullname': 'mossybwuny', 'name': 'mossybwuny', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '6776b55fb167f35a2227afec', 'avatarUrl': '/avatars/9a0f25c215383edefe2e24fc8e2adcce.svg', 'fullname': 'Shahmeer MALIK', 'name': 'meer2731', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6538e1d056c9b35961defc7d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/Ug631YAvSG8Hk5ydvawrp.jpeg', 'fullname': 'Usama Ahmed', 'name': 'usamaahmedkhan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '64ae73c1404272919b1743a4', 'avatarUrl': '/avatars/6bd923632ece903fe45cc44a67180c17.svg', 'fullname': 'Arhan Ayan', 'name': 'arhnayan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65245e2da9a710554b14e92f', 'avatarUrl': '/avatars/f5634b2181ecbb0fc1d272a353eda9d4.svg', 'fullname': 'Ali Hmaou', 'name': 'alihmaou', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '67be900198943267590afd27', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67be900198943267590afd27/8_5MQU_9p1oGEr0I84KOb.jpeg', 'fullname': 'panjinhao', 'name': 'ishaqsaviani', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '65d00458fff501149572827f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg', 'fullname': 'Sami Halawa', 'name': 'samihalawa', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 
'isFollowing': False}]",/posts/samihalawa/685788730899562,2864,"{'language': 'en', 'probability': 0.7848332524299622}",9 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,143022720291674,"[{'type': 'text', 'value': '🌐 Public MediaWiki Collection Dataset - ', 'raw': '🌐 Public MediaWiki Collection Dataset - '}, {'type': 'link', 'href': 'https://huggingface.co/datasets/nyuuzyou/wikis', 'raw': 'https://huggingface.co/datasets/nyuuzyou/wikis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection of 1.66M+ articles from 930 public MediaWiki instances featuring:', 'raw': 'Collection of 1.66M+ articles from 930 public MediaWiki instances featuring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Full article content from diverse public wikis across the internet', 'raw': '- Full article content from diverse public wikis across the internet'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Complete metadata including templates, categories, and section structure', 'raw': '- Complete metadata including templates, categories, and section structure'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Rich structural information preserving wiki organization and links', 'raw': '- Rich structural information preserving wiki organization and links'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multilingual content across 35+ languages including English, Chinese, Spanish, and more', 'raw': '- Multilingual content across 35+ languages including English, Chinese, Spanish, and more'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Regional language variants including US/UK English, Brazilian Portuguese, and Traditional/Simplified Chinese', 'raw': '- Regional language variants including US/UK English, Brazilian Portuguese, and Traditional/Simplified Chinese'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key contents:', 'raw': 'Key contents:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 1,662,448 wiki articles with full text', 'raw': '- 1,662,448 wiki articles with full text'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Extensive metadata including templates, categories, sections', 'raw': '- Extensive metadata including templates, categories, sections'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Internal wikilinks and external reference information', 'raw': '- Internal wikilinks and external reference information'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cross-domain knowledge spanning multiple topics and fields', 'raw': '- Cross-domain knowledge spanning multiple topics and fields'}]","🌐 Public MediaWiki Collection Dataset - https://huggingface.co/datasets/nyuuzyou/wikis + +Collection of 1.66M+ articles from 930 public MediaWiki instances featuring: + +- Full article content from diverse public wikis across the internet +- Complete metadata including templates, categories, and section structure +- Rich structural information preserving wiki organization and links +- Multilingual content across 35+ languages including English, Chinese, Spanish, and more +- Regional language variants including US/UK English, Brazilian Portuguese, and Traditional/Simplified Chinese + 
+Key contents: +- 1,662,448 wiki articles with full text +- Extensive metadata including templates, categories, sections +- Internal wikilinks and external reference information +- Cross-domain knowledge spanning multiple topics and fields",[],[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-03-03 20:21:19,2025-03-03 20:21:19.755,[],/posts/nyuuzyou/143022720291674,583,"{'language': 'en', 'probability': 0.6790907382965088}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61c396584b91d658673447d0/UDexWgm4v9jnvxAeau3E3.jpeg,66.0,Mariusz Kurman,mkurman,857844914709353,"[{'type': 'text', 'value': 'I have uploaded the third pre-training checkpoint after 6 billion tokens to demonstrate that the MedIT One architecture is trainable.', 'raw': 'I have uploaded the third pre-training checkpoint after 6 billion tokens to demonstrate that the MedIT One architecture is trainable.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Give it some noise plz! Love u all :D', 'raw': 'Give it some noise plz! Love u all :D'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'meditsolutions/medit-one-140M-6B-tokens-checkpoint'}, 'url': 'https://huggingface.co/meditsolutions/medit-one-140M-6B-tokens-checkpoint', 'raw': 'https://huggingface.co/meditsolutions/medit-one-140M-6B-tokens-checkpoint'}, {'type': 'new_line', 'raw': '\n'}]","I have uploaded the third pre-training checkpoint after 6 billion tokens to demonstrate that the MedIT One architecture is trainable. + +Give it some noise plz! Love u all :D + +https://huggingface.co/meditsolutions/medit-one-140M-6B-tokens-checkpoint +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61c396584b91d658673447d0/aVa9cr-FGf_EPGEFUc75W.mp4'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-03-03 17:19:44,2025-03-03 17:19:44.648,[],/posts/mkurman/857844914709353,567,"{'language': 'en', 'probability': 0.9011061787605286}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/655a505750b9a14799164a3a/TPbltGUpcxJW_LNLWGSpu.jpeg,18.0,Eliseu Silva,elismasilva,752242610998926,"[{'type': 'text', 'value': 'MoD ControlNet Tile Upscaler for SDXL: Upscale Your Images with Ease! 🚀', 'raw': 'MoD ControlNet Tile Upscaler for SDXL: Upscale Your Images with Ease! 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Meet the MoD ControlNet Tile Upscaler for SDXL, a powerful tool that uses advanced technology to upscale your images without losing quality! Our app is designed to process images in tiles without leaving them blurry or with visible lines between the tiles. The result? Upscaled images with preserved details and smooth, natural transitions—all through a user-friendly interface. ✨', 'raw': 'Meet the MoD ControlNet Tile Upscaler for SDXL, a powerful tool that uses advanced technology to upscale your images without losing quality! Our app is designed to process images in tiles without leaving them blurry or with visible lines between the tiles. The result? Upscaled images with preserved details and smooth, natural transitions—all through a user-friendly interface. 
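Given the size of the collection above, streaming access is the practical way to sample it; a minimal sketch with the `datasets` library, assuming a default `train` split (check the dataset card for the actual configuration):

```python
from datasets import load_dataset

# Stream to avoid downloading all 1.66M articles up front.
wikis = load_dataset("nyuuzyou/wikis", split="train", streaming=True)

for article in wikis.take(3):
    print(article.keys())  # article text plus template/category/section metadata
```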
✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What MoD Upscaler Offers:', 'raw': 'What MoD Upscaler Offers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Preserved Details: Unlike traditional upscalers, the MoD ControlNet Tile Upscaler enlarges your images while maintaining clarity and adding details that might otherwise be lost. Your photos gain more definition without sacrificing original quality.', 'raw': '🔍 Preserved Details: Unlike traditional upscalers, the MoD ControlNet Tile Upscaler enlarges your images while maintaining clarity and adding details that might otherwise be lost. Your photos gain more definition without sacrificing original quality.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧩 Advanced Tiling Technology: We use a smart combination of techniques to ensure natural and smooth transitions between tiles. This means your upscaled images remain consistent and high-quality, even at higher resolutions. No more visible lines or imperfections!', 'raw': '🧩 Advanced Tiling Technology: We use a smart combination of techniques to ensure natural and smooth transitions between tiles. This means your upscaled images remain consistent and high-quality, even at higher resolutions. No more visible lines or imperfections!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡ Fast and Efficient: You don’t need a super-powered computer! Our app is optimized to run quickly and smoothly, even on simpler machines.', 'raw': '⚡ Fast and Efficient: You don’t need a super-powered computer! Our app is optimized to run quickly and smoothly, even on simpler machines.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Easy-to-Use Interface: You don’t have to be an expert to use the MoD ControlNet Tile Upscaler. The interface is simple, intuitive, and designed so anyone can achieve professional-quality results without hassle.', 'raw': '🎨 Easy-to-Use Interface: You don’t have to be an expert to use the MoD ControlNet Tile Upscaler. The interface is simple, intuitive, and designed so anyone can achieve professional-quality results without hassle.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Upscale your images without losing quality and with details preserved. Try the MoD ControlNet Tile Upscaler today! 👍', 'raw': 'Upscale your images without losing quality and with details preserved. Try the MoD ControlNet Tile Upscaler today! 
👍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo App: ', 'raw': 'Demo App: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'elismasilva/mod-control-tile-upscaler-sdxl'}, 'url': 'https://huggingface.co/spaces/elismasilva/mod-control-tile-upscaler-sdxl', 'raw': 'https://huggingface.co/spaces/elismasilva/mod-control-tile-upscaler-sdxl'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Github Code: ', 'raw': 'Github Code: '}, {'type': 'link', 'href': 'https://github.com/DEVAIEXP/mod-control-tile-upscaler-sdxl', 'raw': 'https://github.com/DEVAIEXP/mod-control-tile-upscaler-sdxl'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We use Gradio’s amazing interfaces.', 'raw': 'We use Gradio’s amazing interfaces.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We use Hugging Face Diffusers to build this tool and Hugging Face Spaces to run this demo. ', 'raw': 'We use Hugging Face Diffusers to build this tool and Hugging Face Spaces to run this demo. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thank you all! 🙏', 'raw': 'Thank you all! 🙏'}]","MoD ControlNet Tile Upscaler for SDXL: Upscale Your Images with Ease! 🚀 + +Meet the MoD ControlNet Tile Upscaler for SDXL, a powerful tool that uses advanced technology to upscale your images without losing quality! Our app is designed to process images in tiles without leaving them blurry or with visible lines between the tiles. The result? Upscaled images with preserved details and smooth, natural transitions—all through a user-friendly interface. ✨ + +What MoD Upscaler Offers: + +🔍 Preserved Details: Unlike traditional upscalers, the MoD ControlNet Tile Upscaler enlarges your images while maintaining clarity and adding details that might otherwise be lost. Your photos gain more definition without sacrificing original quality. +🧩 Advanced Tiling Technology: We use a smart combination of techniques to ensure natural and smooth transitions between tiles. This means your upscaled images remain consistent and high-quality, even at higher resolutions. No more visible lines or imperfections! +⚡ Fast and Efficient: You don’t need a super-powered computer! Our app is optimized to run quickly and smoothly, even on simpler machines. +🎨 Easy-to-Use Interface: You don’t have to be an expert to use the MoD ControlNet Tile Upscaler. The interface is simple, intuitive, and designed so anyone can achieve professional-quality results without hassle. +Upscale your images without losing quality and with details preserved. Try the MoD ControlNet Tile Upscaler today! 👍 + +Demo App: https://huggingface.co/spaces/elismasilva/mod-control-tile-upscaler-sdxl +Github Code: https://github.com/DEVAIEXP/mod-control-tile-upscaler-sdxl + +We use Gradio’s amazing interfaces. +We use Hugging Face Diffusers to build this tool and Hugging Face Spaces to run this demo. + +Thank you all! 
🙏","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/655a505750b9a14799164a3a/dxp6hGZiFCOcFJbYxRitw.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/655a505750b9a14799164a3a/Mn4UNjojRj4ja-9kSk3im.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/655a505750b9a14799164a3a/txxaXGkqdEJm3RmYLya57.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/655a505750b9a14799164a3a/9NaXxJyCiwBoX5kHA9hQo.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/655a505750b9a14799164a3a/UqlQo5H0bdKRqnaQz3OHO.png'}]",[],"[{'reaction': '🔥', 'users': ['victor', 'John6666', 'entityinarray', 'TahirC', 'rutesc', 'dantezxcd'], 'count': 6}, {'reaction': '🚀', 'users': ['elismasilva', 'John6666', 'entityinarray', 'rutesc', 'dantezxcd'], 'count': 5}]",2025-03-03 13:44:47,2025-03-06 00:16:20.985,[],/posts/elismasilva/752242610998926,2918,"{'language': 'en', 'probability': 0.8195925354957581}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,776127015570604,"[{'type': 'text', 'value': 'Exciting releases from the Chinese community this February🔥 ', 'raw': 'Exciting releases from the Chinese community this February🔥 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://huggingface.co/collections/zh-ai-community/2025-february-67a35aaa68e97812def5b6ef', 'raw': 'https://huggingface.co/collections/zh-ai-community/2025-february-67a35aaa68e97812def5b6ef'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MLLM: ', 'raw': 'MLLM: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Ovis2 by Alibaba ', 'raw': '✨ Ovis2 by Alibaba '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'AIDC-AI/ovis2-67ab36c7e497429034874464'}, 'url': 'https://huggingface.co/collections/AIDC-AI/ovis2-67ab36c7e497429034874464', 'raw': 'https://huggingface.co/collections/AIDC-AI/ovis2-67ab36c7e497429034874464'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Step Audio Chat by StepFun AI', 'raw': '✨ Step Audio Chat by StepFun AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'stepfun-ai/step-audio-67b33accf45735bb21131b0b'}, 'url': 'https://huggingface.co/collections/stepfun-ai/step-audio-67b33accf45735bb21131b0b', 'raw': 'https://huggingface.co/collections/stepfun-ai/step-audio-67b33accf45735bb21131b0b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Audio: ', 'raw': 'Audio: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Step Audio TTS by StepFunAI', 'raw': '✨ Step Audio TTS by StepFunAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'stepfun-ai/Step-Audio-TTS-3B'}, 'url': 'https://huggingface.co/stepfun-ai/Step-Audio-TTS-3B', 'raw': 'https://huggingface.co/stepfun-ai/Step-Audio-TTS-3B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ InspireMusic by Alibaba', 'raw': '✨ InspireMusic by Alibaba'}, 
{'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'FunAudioLLM'}, 'url': 'https://huggingface.co/FunAudioLLM', 'raw': 'https://huggingface.co/FunAudioLLM', 'image': 'https://www.gravatar.com/avatar/fc0bfb498456471b99375b14ae1f21f8?d=retro&size=100'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Baichuan Audio by BaichuanAI ', 'raw': '✨ Baichuan Audio by BaichuanAI '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'baichuan-inc/Baichuan-Audio-Instruct'}, 'url': 'https://huggingface.co/baichuan-inc/Baichuan-Audio-Instruct', 'raw': 'https://huggingface.co/baichuan-inc/Baichuan-Audio-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Video: ', 'raw': 'Video: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Wan2.1 by Alibaba_Wan', 'raw': '✨ Wan2.1 by Alibaba_Wan'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Wan-AI/Wan2.1-T2V-14B'}, 'url': 'https://huggingface.co/Wan-AI/Wan2.1-T2V-14B', 'raw': 'https://huggingface.co/Wan-AI/Wan2.1-T2V-14B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Stepvideo-T2V by StepFun AI', 'raw': '✨ Stepvideo-T2V by StepFun AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'stepfun-ai/stepvideo-t2v'}, 'url': 'https://huggingface.co/stepfun-ai/stepvideo-t2v', 'raw': 'https://huggingface.co/stepfun-ai/stepvideo-t2v'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ SkyReels-V1 by Skywork', 'raw': '✨ SkyReels-V1 by Skywork'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Skywork/skyreels-v1-67b34676ff65b4ec02d16307'}, 'url': 'https://huggingface.co/collections/Skywork/skyreels-v1-67b34676ff65b4ec02d16307', 'raw': 'https://huggingface.co/collections/Skywork/skyreels-v1-67b34676ff65b4ec02d16307'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ LLaDA-8B by RenminUniversity', 'raw': '✨ LLaDA-8B by RenminUniversity'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'GSAI-ML/LLaDA-8B-Instruct'}, 'url': 'https://huggingface.co/GSAI-ML/LLaDA-8B-Instruct', 'raw': 'https://huggingface.co/GSAI-ML/LLaDA-8B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MoE: ', 'raw': 'MoE: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Moonlight-16B by MoonshotAI (Kimi)', 'raw': '✨ Moonlight-16B by MoonshotAI (Kimi)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'moonshotai/Moonlight-16B-A3B-Instruct'}, 'url': 'https://huggingface.co/moonshotai/Moonlight-16B-A3B-Instruct', 'raw': 'https://huggingface.co/moonshotai/Moonlight-16B-A3B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Reasoning: ', 'raw': 'Reasoning: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 
TinyR1-32B by Qihoo360', 'raw': '✨ TinyR1-32B by Qihoo360'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'qihoo360/TinyR1-32B-Preview'}, 'url': 'https://huggingface.co/qihoo360/TinyR1-32B-Preview', 'raw': 'https://huggingface.co/qihoo360/TinyR1-32B-Preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Chinese DeepSeek R1-Distill data -110k ', 'raw': '✨ Chinese DeepSeek R1-Distill data -110k '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Congliu/Chinese-DeepSeek-R1-Distill-data-110k'}, 'url': 'https://huggingface.co/datasets/Congliu/Chinese-DeepSeek-R1-Distill-data-110k', 'raw': 'https://huggingface.co/datasets/Congliu/Chinese-DeepSeek-R1-Distill-data-110k'}, {'type': 'new_line', 'raw': '\n'}]","Exciting releases from the Chinese community this February🔥 +👉 https://huggingface.co/collections/zh-ai-community/2025-february-67a35aaa68e97812def5b6ef + +MLLM: +✨ Ovis2 by Alibaba +https://huggingface.co/collections/AIDC-AI/ovis2-67ab36c7e497429034874464 +✨ Step Audio Chat by StepFun AI + https://huggingface.co/collections/stepfun-ai/step-audio-67b33accf45735bb21131b0b + +Audio: +✨ Step Audio TTS by StepFunAI + https://huggingface.co/stepfun-ai/Step-Audio-TTS-3B +✨ InspireMusic by Alibaba +https://huggingface.co/FunAudioLLM +✨ Baichuan Audio by BaichuanAI +https://huggingface.co/baichuan-inc/Baichuan-Audio-Instruct + +Video: +✨ Wan2.1 by Alibaba_Wan + https://huggingface.co/Wan-AI/Wan2.1-T2V-14B +✨ Stepvideo-T2V by StepFun AI + https://huggingface.co/stepfun-ai/stepvideo-t2v +✨ SkyReels-V1 by Skywork +https://huggingface.co/collections/Skywork/skyreels-v1-67b34676ff65b4ec02d16307 +✨ LLaDA-8B by RenminUniversity + https://huggingface.co/GSAI-ML/LLaDA-8B-Instruct + +MoE: +✨ Moonlight-16B by MoonshotAI (Kimi) + https://huggingface.co/moonshotai/Moonlight-16B-A3B-Instruct + +Reasoning: +✨ TinyR1-32B by Qihoo360 + https://huggingface.co/qihoo360/TinyR1-32B-Preview + +Dataset: +✨ Chinese DeepSeek R1-Distill data -110k +https://huggingface.co/datasets/Congliu/Chinese-DeepSeek-R1-Distill-data-110k +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/GxUG6G5flWZcLWv1D6REe.jpeg'}]",[],"[{'reaction': '😎', 'users': ['sudanenator', 'reach-vb', 'John6666', 'prithivMLmods', 'Yukkkop', 'bndp'], 'count': 6}, {'reaction': '🔥', 'users': ['reach-vb', 'elismasilva', 'alvanlii', 'TahirC', 'dantezxcd'], 'count': 5}, {'reaction': '🚀', 'users': ['reach-vb', 'dantezxcd'], 'count': 2}]",2025-03-03 09:09:11,2025-03-03 09:09:11.216,[],/posts/AdinaY/776127015570604,4067,"{'language': 'en', 'probability': 0.6472523212432861}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,233650928001403,"[{'type': 'text', 'value': 'I think we have released the best Arabic model under 25B at least based on ', 'raw': 'I think we have released the best Arabic model under 25B at least based on '}, {'type': 'link', 'href': 
'https://huggingface.co/spaces/inceptionai/AraGen-Leaderboard', 'raw': 'https://huggingface.co/spaces/inceptionai/AraGen-Leaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Yehia = ', 'raw': 'Yehia = '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ALLaM-AI/ALLaM-7B-Instruct-preview'}, 'url': 'https://huggingface.co/ALLaM-AI/ALLaM-7B-Instruct-preview', 'raw': 'https://huggingface.co/ALLaM-AI/ALLaM-7B-Instruct-preview'}, {'type': 'text', 'value': ' + GRPO', 'raw': ' + GRPO'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and it\'s ranked as the number one model under the 25B parameter size mark.', 'raw': 'and it\'s ranked as the number one model under the 25B parameter size mark.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Now, I said ""I think"" not ""I am sure"" because this model used the same metric of evaluation the AraGen developers use (the 3C3H) as a reward model to improve its responses and this sparks the question. Is this something good for users or is it another type of overfitting that we don\'t want?', 'raw': 'Now, I said ""I think"" not ""I am sure"" because this model used the same metric of evaluation the AraGen developers use (the 3C3H) as a reward model to improve its responses and this sparks the question. Is this something good for users or is it another type of overfitting that we don\'t want?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I don't know if this is a good thing or a bad thing but what I know is that you can try it from here:"", 'raw': ""I don't know if this is a good thing or a bad thing but what I know is that you can try it from here:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Navid-AI/Yehia-7B-preview'}, 'url': 'https://huggingface.co/spaces/Navid-AI/Yehia-7B-preview', 'raw': 'https://huggingface.co/spaces/Navid-AI/Yehia-7B-preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'or Download it for your personal experiments from here:', 'raw': 'or Download it for your personal experiments from here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Navid-AI/Yehia-7B-preview'}, 'url': 'https://huggingface.co/Navid-AI/Yehia-7B-preview', 'raw': 'https://huggingface.co/Navid-AI/Yehia-7B-preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ramadan Kareem 🌙', 'raw': 'Ramadan Kareem 🌙'}]","I think we have released the best Arabic model under 25B at least based on https://huggingface.co/spaces/inceptionai/AraGen-Leaderboard + +Yehia = https://huggingface.co/ALLaM-AI/ALLaM-7B-Instruct-preview + GRPO + +and it's ranked as the number one model under the 25B parameter size mark. + +Now, I said ""I think"" not ""I am sure"" because this model used the same metric of evaluation the AraGen developers use (the 3C3H) as a reward model to improve its responses and this sparks the question. Is this something good for users or is it another type of overfitting that we don't want?
+ +I don't know if this is a good thing or a bad thing but what I know is that you can try it from here: +https://huggingface.co/spaces/Navid-AI/Yehia-7B-preview + +or Download it for your personal experiments from here: +https://huggingface.co/Navid-AI/Yehia-7B-preview + +Ramadan Kareem 🌙","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6116d0584ef9fdfbf45dc4d9/lJMRoKawi4W4TEK8hThuT.png'}]",[],"[{'reaction': '❤️', 'users': ['MohamedRashad', 'John6666', 'Yehor', 'johnlockejrr', 'victor', 'MohammedNaeem', 'MAsad789565', 'fars10', 'el-walid', 'hamzashahid40', 'dantezxcd'], 'count': 11}]",2025-03-03 07:59:37,2025-03-05 04:56:53.324,"[{'_id': '64dd9a722e086931f67127ee', 'avatarUrl': '/avatars/9789c9291ae4ac53b532f8fe8581047a.svg', 'fullname': 'M Asad Iqbal', 'name': 'MAsad789565', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/MohamedRashad/233650928001403,3514,"{'language': 'en', 'probability': 0.8981483578681946}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63c09b32dd793d5a62895a95/SFdGQeiZpD5oxkl66wK2u.jpeg,48.0,Duskfall Crew,Duskfallcrew,302989213829814,"[{'type': 'text', 'value': 'New Loras are in the EarthnDusk Repo Here: https://huggingface.co/EarthnDusk/Loras_2025', 'raw': 'New Loras are in the EarthnDusk Repo Here: https://huggingface.co/EarthnDusk/Loras_2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""For those of you wondering what certain content is in the repos and don't want to ask questions in the issues area:"", 'raw': ""For those of you wondering what certain content is in the repos and don't want to ask questions in the issues area:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://discord.gg/HhBSvM9gBY', 'raw': 'https://discord.gg/HhBSvM9gBY'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Come hang out, share your ML/AI goodies, see what projects are going on-', 'raw': 'Come hang out, share your ML/AI goodies, see what projects are going on-'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Maybe even partner up with the server!', 'raw': 'Maybe even partner up with the server!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More content of ours is linked in my collection here:', 'raw': 'More content of ours is linked in my collection here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Duskfallcrew/earth-and-dusk-repos-67c3ace7546e6c74c7069c83'}, 'url': 'https://huggingface.co/collections/Duskfallcrew/earth-and-dusk-repos-67c3ace7546e6c74c7069c83', 'raw': 'https://huggingface.co/collections/Duskfallcrew/earth-and-dusk-repos-67c3ace7546e6c74c7069c83'}]","New Loras are in the EarthnDusk Repo Here: https://huggingface.co/EarthnDusk/Loras_2025 +For those of you wondering what certain content is in the repos and don't want to ask questions in the issues area: +https://discord.gg/HhBSvM9gBY + +Come hang out, share your ML/AI goodies, see what projects are going on- +Maybe even partner up with the server!
+ +More content of ours is linked in my collection here: +https://huggingface.co/collections/Duskfallcrew/earth-and-dusk-repos-67c3ace7546e6c74c7069c83","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c09b32dd793d5a62895a95/ifw1e-DWJKdJNtCfD-ydk.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c09b32dd793d5a62895a95/mrMpqY5ey951zzBi484Bo.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c09b32dd793d5a62895a95/dHFBThKiXDHEt-WCozFZX.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c09b32dd793d5a62895a95/fMw2q9aGsS3qa3NNKjoEy.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c09b32dd793d5a62895a95/dIq_9Qjg0dDGXna4CApb1.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63c09b32dd793d5a62895a95/9r4ubhbn42V-2HeyVu6zO.jpeg'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd'], 'count': 2}, {'reaction': '❤️', 'users': ['John6666'], 'count': 1}]",2025-03-03 05:51:25,2025-03-03 07:37:29.801,"[{'_id': '67c55781a0e3b22b0b26e062', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/3mG36NXwIpQ6NcPchBjQ2.jpeg', 'fullname': 'testa finda', 'name': 'testafinda', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/Duskfallcrew/302989213829814,719,"{'language': 'en', 'probability': 0.860573947429657}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,949994434289362,"[{'type': 'resource', 'resource': {'type': 'paper', 'id': '2411.18203'}, 'url': 'https://huggingface.co/papers/2411.18203', 'raw': 'https://huggingface.co/papers/2411.18203', 'label': 'Critic-V: VLM Critics Help Catch VLM Errors in Multimodal Reasoning (2411.18203)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Critic-V has been accepted by CVPR2025!', 'raw': 'Critic-V has been accepted by CVPR2025!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Bonus! VRI-160K uploaded now!', 'raw': 'Bonus! VRI-160K uploaded now!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'di-zhang-fdu/R1-Vision-Reasoning-Instructions'}, 'url': 'https://huggingface.co/datasets/di-zhang-fdu/R1-Vision-Reasoning-Instructions', 'raw': 'https://huggingface.co/datasets/di-zhang-fdu/R1-Vision-Reasoning-Instructions'}]","https://huggingface.co/papers/2411.18203 +Critic-V has been accepted by CVPR2025! +Bonus! VRI-160K uploaded now! +https://huggingface.co/datasets/di-zhang-fdu/R1-Vision-Reasoning-Instructions",[],[],"[{'reaction': '🔥', 'users': ['merterbak', 'John6666', 'jwu323', 'dantezxcd'], 'count': 4}]",2025-03-03 05:44:08,2025-03-03 07:04:29.051,[],/posts/di-zhang-fdu/949994434289362,2957,"{'language': 'en', 'probability': 0.8556229472160339}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,636972739498133,"[{'type': 'text', 'value': 'Is this the best tool to extract clean info from PDFs, handwriting and complex documents yet? ', 'raw': 'Is this the best tool to extract clean info from PDFs, handwriting and complex documents yet? 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Open source olmOCR just dropped and the results are impressive.', 'raw': 'Open source olmOCR just dropped and the results are impressive.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Tested the free demo with various documents, including a handwritten Claes Oldenburg letter. The speed is impressive: 3000 tokens/second on your own GPU - that's 1/32 the cost of GPT-4o ($190/million pages). Game-changer for content extraction and digital archives."", 'raw': ""Tested the free demo with various documents, including a handwritten Claes Oldenburg letter. The speed is impressive: 3000 tokens/second on your own GPU - that's 1/32 the cost of GPT-4o ($190/million pages). Game-changer for content extraction and digital archives.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To achieve this, Ai2 trained a 7B vision language model on 260K pages from 100K PDFs using ""document anchoring"" - combining PDF metadata with page images.', 'raw': 'To achieve this, Ai2 trained a 7B vision language model on 260K pages from 100K PDFs using ""document anchoring"" - combining PDF metadata with page images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Best part: it actually understands document structure (columns, tables, equations) instead of just jumbling everything together like most OCR tools. Their human eval results back this up.', 'raw': 'Best part: it actually understands document structure (columns, tables, equations) instead of just jumbling everything together like most OCR tools. Their human eval results back this up.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Try the demo: ', 'raw': '👉 Try the demo: '}, {'type': 'link', 'href': 'https://olmocr.allenai.org', 'raw': 'https://olmocr.allenai.org'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Going right into the AI toolkit: ', 'raw': 'Going right into the AI toolkit: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'JournalistsonHF/ai-toolkit'}, 'url': 'https://huggingface.co/spaces/JournalistsonHF/ai-toolkit', 'raw': 'https://huggingface.co/spaces/JournalistsonHF/ai-toolkit'}]","Is this the best tool to extract clean info from PDFs, handwriting and complex documents yet? + +Open source olmOCR just dropped and the results are impressive. + +Tested the free demo with various documents, including a handwritten Claes Oldenburg letter. The speed is impressive: 3000 tokens/second on your own GPU - that's 1/32 the cost of GPT-4o ($190/million pages). Game-changer for content extraction and digital archives. + +To achieve this, Ai2 trained a 7B vision language model on 260K pages from 100K PDFs using ""document anchoring"" - combining PDF metadata with page images. + +Best part: it actually understands document structure (columns, tables, equations) instead of just jumbling everything together like most OCR tools. Their human eval results back this up. 
+ +👉 Try the demo: https://olmocr.allenai.org + +Going right into the AI toolkit: https://huggingface.co/spaces/JournalistsonHF/ai-toolkit","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/Lvg0y6RuQHI3NHiqt2EIQ.qt'}]",[],"[{'reaction': '🔥', 'users': ['DonkeySmall', 'elfpuck', 'jfey', 'prasiyer', 'John6666', 'Ameeeee', 'Davidsv', 'dantezxcd'], 'count': 8}, {'reaction': '👍', 'users': ['sethderrick', 'Davidsv', 'SpyC0der77', 'dantezxcd'], 'count': 4}]",2025-02-26 23:02:38,2025-02-28 07:36:38.735,"[{'_id': '64b104039ac3f3dd267809db', 'avatarUrl': '/avatars/eb11aecedc5ee4412e97246a5fb3721f.svg', 'fullname': 'Irfan Kheiri', 'name': 'kheiri', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/fdaudens/636972739498133,3136,"{'language': 'en', 'probability': 0.8014965057373047}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png,3314.0,ben burtenshaw,burtenshaw,352638065928004,"[{'type': 'text', 'value': 'Now the Hugging Face agent course is getting real! With frameworks like smolagents, LlamaIndex, and LangChain.', 'raw': 'Now the Hugging Face agent course is getting real! With frameworks like smolagents, LlamaIndex, and LangChain.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Follow the org for updates ', 'raw': '🔗 Follow the org for updates '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'agents-course'}, 'url': 'https://huggingface.co/agents-course', 'raw': 'https://huggingface.co/agents-course', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60cae820b1c79a3e4b436664/gSOeYxuhnE0U0HFeh__wA.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This week we are releasing the first framework unit in the course and it’s on smolagents. This is what the unit covers:', 'raw': 'This week we are releasing the first framework unit in the course and it’s on smolagents. This is what the unit covers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- why should you use smolagents vs another library?', 'raw': '- why should you use smolagents vs another library?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- how to build agents that use code', 'raw': '- how to build agents that use code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- build multiagents systems', 'raw': '- build multiagents systems'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- use vision language models for browser use', 'raw': '- use vision language models for browser use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The team has been working flat out on this for a few weeks. Led by ', 'raw': 'The team has been working flat out on this for a few weeks. Led by '}, {'type': 'mention', 'user': 'sergiopaniego', 'raw': '@sergiopaniego'}, {'type': 'text', 'value': ' and supported by smolagents author ', 'raw': ' and supported by smolagents author '}, {'type': 'mention', 'user': 'm-ric', 'raw': '@m-ric'}, {'type': 'text', 'value': '.', 'raw': '.'}]","Now the Hugging Face agent course is getting real! With frameworks like smolagents, LlamaIndex, and LangChain. 
+ +🔗 Follow the org for updates https://huggingface.co/agents-course + +This week we are releasing the first framework unit in the course and it’s on smolagents. This is what the unit covers: + +- why should you use smolagents vs another library? +- how to build agents that use code +- build multiagents systems +- use vision language models for browser use + +The team has been working flat out on this for a few weeks. Led by @sergiopaniego and supported by smolagents author @m-ric.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62d648291fa3e4e7ae3fa6e8/70ak0wCHyRr2aDKroDaHA.png'}]","[{'_id': '63d10d4e8eaa4831005e92b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg', 'fullname': 'Aymeric Roucher', 'name': 'm-ric', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1652}, {'_id': '61929226ded356549e20c5da', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61929226ded356549e20c5da/ONUjP2S5fUWd07BiFXm0i.jpeg', 'fullname': 'Sergio Paniego', 'name': 'sergiopaniego', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 874}]","[{'reaction': '🔥', 'users': ['sergiopaniego', 'fractalego', 'makarandprabhu', 'AdinaY', 'linoyts', 'John6666', 'theainerd', 'FranckAbgrall', 'EdwinSanDev', 'sreenivas-rao', 'prithivMLmods', 'merterbak', 'PuristanLabs1', 'Uday', 'elfpuck', 'EquinoxElahin', 'Lukdar', 'nickprock', 'SergeySasnouski', 'pjfernan'], 'count': 20}, {'reaction': '🤗', 'users': ['John6666', 'oieieio', 'nickprock', 'Tues23', 'dantezxcd'], 'count': 5}, {'reaction': '😎', 'users': ['John6666', 'adcg', 'dantezxcd'], 'count': 3}, {'reaction': '👍', 'users': ['senthoorvisakan'], 'count': 1}]",2025-02-26 09:01:10,2025-02-26 09:01:10.871,[],/posts/burtenshaw/352638065928004,6475,"{'language': 'en', 'probability': 0.8753405213356018}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,923354082387927,"[{'type': 'text', 'value': '🚀 Introducing MOUSE: Space Research Thinking on HuggingFace Spaces', 'raw': '🚀 Introducing MOUSE: Space Research Thinking on HuggingFace Spaces'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 How to Get Started', 'raw': '🚀 How to Get Started'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginipick/spaces-research-think'}, 'url': 'https://huggingface.co/spaces/ginipick/spaces-research-think', 'raw': 'https://huggingface.co/spaces/ginipick/spaces-research-think'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Welcome to **MOUSE: Space Research Thinking** – an innovative HuggingFace Spaces project designed to transform how you analyze and interact with Python code. Whether you're a developer, researcher, or simply passionate about coding, this tool provides state-of-the-art analysis, summarization, and usage guidance, all powered by advanced AI."", 'raw': ""Welcome to **MOUSE: Space Research Thinking** – an innovative HuggingFace Spaces project designed to transform how you analyze and interact with Python code. 
Whether you're a developer, researcher, or simply passionate about coding, this tool provides state-of-the-art analysis, summarization, and usage guidance, all powered by advanced AI.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '---', 'raw': '---'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🌟 Key Features', 'raw': '## 🌟 Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Real-Time Code Analysis** ', 'raw': '- **Real-Time Code Analysis** '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Instantly dissect your Python code to reveal its structure, functionality, and potential applications. Our tool delivers:', 'raw': ' Instantly dissect your Python code to reveal its structure, functionality, and potential applications. Our tool delivers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **Background & Necessity**: Understand the context behind the code.', 'raw': ' - **Background & Necessity**: Understand the context behind the code.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **Functional Utility & Value**: Highlight core functionalities and benefits.', 'raw': ' - **Functional Utility & Value**: Highlight core functionalities and benefits.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **Distinctive Features**: Discover what sets the project apart.', 'raw': ' - **Distinctive Features**: Discover what sets the project apart.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **Target Audience & Applications**: Identify who can benefit and how.', 'raw': ' - **Target Audience & Applications**: Identify who can benefit and how.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **Expected Impact**: Envision the improvements and innovations the code can drive. ', 'raw': ' - **Expected Impact**: Envision the improvements and innovations the code can drive. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' 🔍', 'raw': ' 🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Visual File Structure Overview** ', 'raw': '- **Visual File Structure Overview** '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Navigate your project with ease! A dynamic tree-view displays your file hierarchy in a clear, intuitive format, allowing you to explore directories and files effortlessly. 🌲', 'raw': ' Navigate your project with ease! A dynamic tree-view displays your file hierarchy in a clear, intuitive format, allowing you to explore directories and files effortlessly. 🌲'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Interactive Usage Guide** ', 'raw': '- **Interactive Usage Guide** '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Receive step-by-step instructions and practical tips on using the tool effectively. Our AI assistant explains everything in an engaging, user-friendly manner, ensuring a smooth learning curve. 💡', 'raw': ' Receive step-by-step instructions and practical tips on using the tool effectively. Our AI assistant explains everything in an engaging, user-friendly manner, ensuring a smooth learning curve. 
💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **AI-Powered Code Chat** ', 'raw': '- **AI-Powered Code Chat** '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Engage in real-time conversations with our AI. Ask questions, request detailed explanations, or dive deeper into code specifics with a chat interface that makes complex topics accessible. 🤖💬', 'raw': ' Engage in real-time conversations with our AI. Ask questions, request detailed explanations, or dive deeper into code specifics with a chat interface that makes complex topics accessible. 🤖💬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Customizable Experience** ', 'raw': '- **Customizable Experience** '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Tailor the analysis to your needs with adjustable parameters like token limits and response temperatures, enabling both concise summaries and in-depth explorations. ⚙️', 'raw': ' Tailor the analysis to your needs with adjustable parameters like token limits and response temperatures, enabling both concise summaries and in-depth explorations. ⚙️'}]","🚀 Introducing MOUSE: Space Research Thinking on HuggingFace Spaces + +🚀 How to Get Started +https://huggingface.co/spaces/ginipick/spaces-research-think + +Welcome to **MOUSE: Space Research Thinking** – an innovative HuggingFace Spaces project designed to transform how you analyze and interact with Python code. Whether you're a developer, researcher, or simply passionate about coding, this tool provides state-of-the-art analysis, summarization, and usage guidance, all powered by advanced AI. + +--- + +## 🌟 Key Features + +- **Real-Time Code Analysis** + Instantly dissect your Python code to reveal its structure, functionality, and potential applications. Our tool delivers: + - **Background & Necessity**: Understand the context behind the code. + - **Functional Utility & Value**: Highlight core functionalities and benefits. + - **Distinctive Features**: Discover what sets the project apart. + - **Target Audience & Applications**: Identify who can benefit and how. + - **Expected Impact**: Envision the improvements and innovations the code can drive. + 🔍 + +- **Visual File Structure Overview** + Navigate your project with ease! A dynamic tree-view displays your file hierarchy in a clear, intuitive format, allowing you to explore directories and files effortlessly. 🌲 + +- **Interactive Usage Guide** + Receive step-by-step instructions and practical tips on using the tool effectively. Our AI assistant explains everything in an engaging, user-friendly manner, ensuring a smooth learning curve. 💡 + +- **AI-Powered Code Chat** + Engage in real-time conversations with our AI. Ask questions, request detailed explanations, or dive deeper into code specifics with a chat interface that makes complex topics accessible. 🤖💬 + +- **Customizable Experience** + Tailor the analysis to your needs with adjustable parameters like token limits and response temperatures, enabling both concise summaries and in-depth explorations. 
⚙️","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/POJgpZyWbyETx3kw4C_Qz.png'}]",[],"[{'reaction': '🔥', 'users': ['ginipick', 'jammerisue', 'algebradavid', 'ineverriver', 'victoresmart', 'seawolf2357', 'aiqcamp', 'gunship999', 'aiqtech', 'fantos', 'fantaxy', 'immunobiotech', 'openfree', 'cutechicken', 'kolaslab', 'mason7278', 'faruqhrp', 'John6666', 'EdwinSanDev', 'Fishtiks', 'b0urnvita', 'icalab', 'utitupem', 'mibelunker', 'brabebird', 'ainagua', 'dantezxcd'], 'count': 27}, {'reaction': '🚀', 'users': ['jammerisue', 'algebradavid', 'ineverriver', 'victoresmart', 'seawolf2357', 'gunship999', 'aiqtech', 'fantos', 'fantaxy', 'immunobiotech', 'ginipick', 'kolaslab', 'Bananenman2002', 'openfree', 'icalab', 'utitupem', 'mibelunker', 'ainagua', 'brabebird', 'dantezxcd'], 'count': 20}, {'reaction': '👀', 'users': ['jammerisue', 'algebradavid', 'ineverriver', 'victoresmart', 'gunship999', 'aiqtech', 'fantos', 'fantaxy', 'immunobiotech', 'ginipick', 'kolaslab', 'openfree', 'icalab', 'utitupem', 'mibelunker', 'brabebird'], 'count': 16}, {'reaction': '❤️', 'users': ['jammerisue', 'algebradavid', 'ineverriver', 'gunship999', 'fantos', 'immunobiotech', 'ginipick', 'kolaslab', 'aiqtech', 'Akseltinfat', 'openfree', 'ainagua', 'brabebird'], 'count': 13}, {'reaction': '🤗', 'users': ['jammerisue', 'algebradavid', 'gunship999', 'fantos', 'immunobiotech', 'ginipick', 'aiqtech', 'openfree'], 'count': 8}, {'reaction': '👍', 'users': ['jammerisue', 'aiqcamp', 'immunobiotech', 'ginipick', 'aiqtech', 'algebradavid', 'openfree'], 'count': 7}, {'reaction': '😔', 'users': ['jammerisue', 'aiqcamp', 'immunobiotech', 'ginipick', 'aiqtech', 'algebradavid', 'openfree'], 'count': 7}, {'reaction': '🤯', 'users': ['jammerisue', 'aiqcamp', 'immunobiotech', 'ginipick', 'aiqtech', 'algebradavid', 'openfree'], 'count': 7}, {'reaction': '🤝', 'users': ['jammerisue', 'aiqcamp', 'immunobiotech', 'ginipick', 'aiqtech', 'algebradavid', 'openfree'], 'count': 7}, {'reaction': '🧠', 'users': ['jammerisue', 'immunobiotech', 'ginipick', 'aiqtech', 'algebradavid', 'openfree'], 'count': 6}, {'reaction': '➕', 'users': ['jammerisue', 'immunobiotech', 'ginipick', 'aiqtech', 'algebradavid', 'openfree'], 'count': 6}, {'reaction': '😎', 'users': ['jammerisue', 'immunobiotech', 'ginipick', 'aiqtech', 'algebradavid', 'openfree'], 'count': 6}]",2025-02-26 07:16:30,2025-02-26 11:14:39.921,"[{'_id': '67bed6a2e81fc90e2610e8c6', 'avatarUrl': '/avatars/fa9fd5300905a199fc57df4292bb07b1.svg', 'fullname': 'jack james', 'name': 'hussainnkg64', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/ginipick/923354082387927,6462,"{'language': 'en', 'probability': 0.7886224985122681}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64d1129297ca59bcf7458d07/54J83Jv0Mk2CEX73Bx1_g.jpeg,81.0,Manav Majumdar,smirki,817475984441874,"[{'type': 'text', 'value': 'Hey! What kind of models do you guys want to see?', 'raw': 'Hey! What kind of models do you guys want to see?'}]",Hey! 
What kind of models do you guys want to see?,[],[],"[{'reaction': '🚀', 'users': ['dantezxcd'], 'count': 1}]",2025-02-26 06:18:33,2025-02-28 10:18:34.992,"[{'_id': '6459daf94fe72fae522c5ef7', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6459daf94fe72fae522c5ef7/JLvH0yd0Cfn0qFgaM6KcB.jpeg', 'fullname': 'huggingkot', 'name': 'huggingkot', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '64d1129297ca59bcf7458d07', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64d1129297ca59bcf7458d07/54J83Jv0Mk2CEX73Bx1_g.jpeg', 'fullname': 'Manav Majumdar', 'name': 'smirki', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 81, 'isFollowing': False}, {'_id': '66e5e26f1fc6410cf61f5618', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/KMbWt2SYR04mIhizCvTLe.png', 'fullname': 'Saurav Dhiani', 'name': 'svsaurav95', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/smirki/817475984441874,1039,"{'language': 'en', 'probability': 0.9769014716148376}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,116274779602090,"[{'type': 'text', 'value': 'Necessity is the mother of invention. To understand ⚡FlashMLA⚡ by ', 'raw': 'Necessity is the mother of invention. To understand ⚡FlashMLA⚡ by '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐋DeepSeek 🐋, the first question to ask is why. ', 'raw': '🐋DeepSeek 🐋, the first question to ask is why. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The keyword here is H800, a lower-end product tailored for export control. The purpose here is to squeeze out as much performance as possible.', 'raw': 'The keyword here is H800, a lower-end product tailored for export control. The purpose here is to squeeze out as much performance as possible.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But here is the most important takeaway: this invention benefits EVERYONE.', 'raw': 'But here is the most important takeaway: this invention benefits EVERYONE.'}]","Necessity is the mother of invention. To understand ⚡FlashMLA⚡ by +🐋DeepSeek 🐋, the first question to ask is why. + +The keyword here is H800, a lower-end product tailored for export control. The purpose here is to squeeze out as much performance as possible.
+ +But here is the most important takeaway: this invention benefits EVERYONE.",[],[],"[{'reaction': '🤗', 'users': ['ishaqsaviani', 'John6666', 'ShadowWolf1999', 'EdwinSanDev', 'chriswritescode', 'win10', 'b0urnvita', 'masterHV34', 'dantezxcd'], 'count': 9}, {'reaction': '🚀', 'users': ['DeathGodlike', 'xi0v', 'dantezxcd'], 'count': 3}, {'reaction': '🔥', 'users': ['DeathGodlike'], 'count': 1}]",2025-02-26 03:09:45,2025-02-27 08:54:09.768,"[{'_id': '648c2585a58a58a8fc874f70', 'avatarUrl': '/avatars/4fbf997a890081d750725e127a8005e1.svg', 'fullname': 'Hosein Rastegar', 'name': 'devops724', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '67b8e0d79107c46e941a3fc8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/kjcJuaIcvP-1x4Hmf6LMt.png', 'fullname': 'Karen Akers', 'name': 'karenny', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/onekq/116274779602090,2768,"{'language': 'en', 'probability': 0.9624326229095459}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/620630b603825909dcbeba35/vNlOtJqdcP3vpAfeHfNvP.jpeg,246.0,Aaron C Wacker,awacke1,137989260321113,"[{'type': 'text', 'value': '🚀 Blast into the future with ZaxxonGalaxian – a thrilling 3D action game where you navigate epic battles through towering 3D cityscapes! Face off against relentless swarm bots, climb the leaderboard, and dominate the skies. ', 'raw': '🚀 Blast into the future with ZaxxonGalaxian – a thrilling 3D action game where you navigate epic battles through towering 3D cityscapes! Face off against relentless swarm bots, climb the leaderboard, and dominate the skies. '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/ZaxxoGalaxian'}, 'url': 'https://huggingface.co/spaces/awacke1/ZaxxoGalaxian', 'raw': 'https://huggingface.co/spaces/awacke1/ZaxxoGalaxian'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","🚀 Blast into the future with ZaxxonGalaxian – a thrilling 3D action game where you navigate epic battles through towering 3D cityscapes! Face off against relentless swarm bots, climb the leaderboard, and dominate the skies. 
https://huggingface.co/spaces/awacke1/ZaxxoGalaxian ","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/RhJeyvNURYj0-LxZmTzMl.mp4'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'mason7278', 'dantezxcd'], 'count': 3}]",2025-02-26 02:41:13,2025-02-26 02:41:13.251,[],/posts/awacke1/137989260321113,2494,"{'language': 'en', 'probability': 0.8302603960037231}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,268157671870814,"[{'type': 'text', 'value': 'Wan 2.1 Ultra Advanced Gradio APP for - Works as low as 4GB VRAM - 1-Click Installers for Windows, RunPod, Massed Compute - Batch Processing - T2V - I2V - V2V', 'raw': 'Wan 2.1 Ultra Advanced Gradio APP for - Works as low as 4GB VRAM - 1-Click Installers for Windows, RunPod, Massed Compute - Batch Processing - T2V - I2V - V2V'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Installer and APP : ', 'raw': 'Installer and APP : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/123105403', 'raw': 'https://www.patreon.com/posts/123105403'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Download from here : ', 'raw': 'Download from here : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/123105403', 'raw': 'https://www.patreon.com/posts/123105403'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have been working 14 hours today to make this APP before sleeping for you guys :)', 'raw': 'I have been working 14 hours today to make this APP before sleeping for you guys :)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We have all the features of Wan 2.1 model', 'raw': 'We have all the features of Wan 2.1 model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Text to Video 1.3B (as low as 3.5 GB VRAM) - Really fast - 480x832px or 832x480px', 'raw': 'Text to Video 1.3B (as low as 3.5 GB VRAM) - Really fast - 480x832px or 832x480px'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Video to Video 1.3B (as low as 3.5 GB VRAM) - Really fast - 480x832px or 832x480px', 'raw': 'Video to Video 1.3B (as low as 3.5 GB VRAM) - Really fast - 480x832px or 832x480px'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Text to Video 14B (as low as 17 GB VRAM) - still may work at below VRAM but slower - 720x1280px or 1280x720px', 'raw': 'Text to Video 14B (as low as 17 GB VRAM) - still may work at below VRAM but slower - 720x1280px or 1280x720px'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Image to Video 14B (as low as 17 GB VRAM) - still may work at below VRAM but slower - 720x1280px or 1280x720px', 'raw': 'Image to Video 14B (as low as 17 GB VRAM) - still may work at below VRAM but slower - 720x1280px or 1280x720px'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'When you analyze the above and below images', 'raw': 'When you analyze the above and below images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'First video is animated from the input image with following prompt', 'raw': 'First video is animated from the 
input image with following prompt'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""A hooded wraith stands motionless in a torrential downpour, lightning cracking across the stormy sky behind it. Its face is an impenetrable void of darkness beneath the tattered hood. Rain cascades down its ragged, flowing cloak, which appears to disintegrate into wisps of shadow at the edges. The mysterious figure holds an enormous sword of pure energy, crackling with electric blue lightning that pulses and flows through the blade like liquid electricity. The weapon drags slightly on the wet ground, sending ripples of power across the puddles forming at the figure's feet. Three glowing blue gems embedded in its chest pulse in rhythm with the storm's lightning strikes, each flash illuminating the decaying, ancient fabric of its attire. The rain intensifies around the figure, droplets seemingly slowing as they near the dark entity, while forks of lightning repeatedly illuminate its imposing silhouette. The atmosphere grows heavier with each passing moment as the wraith slowly raises its crackling blade, the blue energy intensifying and casting eerie shadows "", 'raw': ""A hooded wraith stands motionless in a torrential downpour, lightning cracking across the stormy sky behind it. Its face is an impenetrable void of darkness beneath the tattered hood. Rain cascades down its ragged, flowing cloak, which appears to disintegrate into wisps of shadow at the edges. The mysterious figure holds an enormous sword of pure energy, crackling with electric blue lightning that pulses and flows through the blade like liquid electricity. The weapon drags slightly on the wet ground, sending ripples of power across the puddles forming at the figure's feet. Three glowing blue gems embedded in its chest pulse in rhythm with the storm's lightning strikes, each flash illuminating the decaying, ancient fabric of its attire. The rain intensifies around the figure, droplets seemingly slowing as they near the dark entity, while forks of lightning repeatedly illuminate its imposing silhouette. The atmosphere grows heavier with each passing moment as the wraith slowly raises its crackling blade, the blue energy intensifying and casting eerie shadows ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Wan 2.1 Ultra Advanced Gradio APP for - Works as low as 4GB VRAM - 1-Click Installers for Windows, RunPod, Massed Compute - Batch Processing - T2V - I2V - V2V + +Installer and APP : https://www.patreon.com/posts/123105403 + +Download from here : https://www.patreon.com/posts/123105403 + +I have been working 14 hours today to make this APP before sleeping for you guys :) + +We have all the features of Wan 2.1 model + +Text to Video 1.3B (as low as 3.5 GB VRAM) - Really fast - 480x832px or 832x480px + +Video to Video 1.3B (as low as 3.5 GB VRAM) - Really fast - 480x832px or 832x480px + +Text to Video 14B (as low as 17 GB VRAM) - still may work at below VRAM but slower - 720x1280px or 1280x720px + +Image to Video 14B (as low as 17 GB VRAM) - still may work at below VRAM but slower - 720x1280px or 1280x720px + +When you analyze the above and below images +First video is animated from the input image with following prompt + +A hooded wraith stands motionless in a torrential downpour, lightning cracking across the stormy sky behind it. Its face is an impenetrable void of darkness beneath the tattered hood. 
Rain cascades down its ragged, flowing cloak, which appears to disintegrate into wisps of shadow at the edges. The mysterious figure holds an enormous sword of pure energy, crackling with electric blue lightning that pulses and flows through the blade like liquid electricity. The weapon drags slightly on the wet ground, sending ripples of power across the puddles forming at the figure's feet. Three glowing blue gems embedded in its chest pulse in rhythm with the storm's lightning strikes, each flash illuminating the decaying, ancient fabric of its attire. The rain intensifies around the figure, droplets seemingly slowing as they near the dark entity, while forks of lightning repeatedly illuminate its imposing silhouette. The atmosphere grows heavier with each passing moment as the wraith slowly raises its crackling blade, the blue energy intensifying and casting eerie shadows + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/RKZIpzastAcHbAJI8faRU.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/syRyGaJ0LZZBfXCC4kKuL.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/X0KbZ9wTDAIktkb-le3NP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/bUEWkwhs9dB4g0eHprHo1.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/FYH3IFEkla6N7FWBZAYz7.webp'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'devops724', 'mason7278', 'Deepakkrishnaa', 'b0urnvita', 's3nh'], 'count': 6}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'Nexesenex', 'dantezxcd'], 'count': 3}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'ggyydream', 'dantezxcd'], 'count': 3}, {'reaction': '🤗', 'users': ['MonsterMMORPG', 'dantezxcd'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '❤️', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😔', 'users': ['jujutechnology'], 'count': 1}]",2025-02-26 01:24:58,2025-02-26 13:41:51.981,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '6345bd89fe134dfd7a0dba40', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg', 'fullname': 'Furkan Gözükara', 'name': 'MonsterMMORPG', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 646, 'isFollowing': False}]",/posts/MonsterMMORPG/268157671870814,2427,"{'language': 'en', 'probability': 0.91029953956604}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg,210.0,Sebastian Gabarain,Locutusque,390800785751051,"[{'type': 'text', 'value': ""🎉 Exciting news, everyone! I've just released **Thespis-Llama-3.1-8B**, a new language model designed for enhanced roleplaying! 
✨️"", 'raw': ""🎉 Exciting news, everyone! I've just released **Thespis-Llama-3.1-8B**, a new language model designed for enhanced roleplaying! ✨️""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's built on Llama-3.1 and fine-tuned with a focus on Theory of Mind reasoning to create more believable and engaging characters. It even learned a few tricks on its own, like adding in-character thought processes! 🧠"", 'raw': ""It's built on Llama-3.1 and fine-tuned with a focus on Theory of Mind reasoning to create more believable and engaging characters. It even learned a few tricks on its own, like adding in-character thought processes! 🧠""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out here: ', 'raw': 'Check it out here: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Locutusque/Thespis-Llama-3.1-8B'}, 'url': 'https://huggingface.co/Locutusque/Thespis-Llama-3.1-8B', 'raw': 'https://huggingface.co/Locutusque/Thespis-Llama-3.1-8B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Give it a try and let me know what you think! I'm especially interested in feedback on how well the characters stay in role and if the responses feel natural. Looking forward to seeing what amazing stories you create! ✍️"", 'raw': ""Give it a try and let me know what you think! I'm especially interested in feedback on how well the characters stay in role and if the responses feel natural. Looking forward to seeing what amazing stories you create! ✍️""}, {'type': 'new_line', 'raw': '\n'}]","🎉 Exciting news, everyone! I've just released **Thespis-Llama-3.1-8B**, a new language model designed for enhanced roleplaying! ✨️ + +It's built on Llama-3.1 and fine-tuned with a focus on Theory of Mind reasoning to create more believable and engaging characters. It even learned a few tricks on its own, like adding in-character thought processes! 🧠 + +Check it out here: https://huggingface.co/Locutusque/Thespis-Llama-3.1-8B + +Give it a try and let me know what you think! I'm especially interested in feedback on how well the characters stay in role and if the responses feel natural. Looking forward to seeing what amazing stories you create! 
✍️ +",[],[],"[{'reaction': '👍', 'users': ['John6666', 'Tonic', 'Fishtiks', 'DeathGodlike', 'Locutusque', 'Suparious', 'JohnRoger'], 'count': 7}, {'reaction': '❤️', 'users': ['Tonic', 'Fishtiks', 'Suparious', 'kekj'], 'count': 4}, {'reaction': '🤗', 'users': ['Tonic', 'Rebelkatt', 'robb-0', 'dantezxcd'], 'count': 4}, {'reaction': '🚀', 'users': ['Tonic', 'Rebelkatt', 'Fishtiks', 'dantezxcd'], 'count': 4}, {'reaction': '🔥', 'users': ['Tonic', 'Fishtiks'], 'count': 2}, {'reaction': '👀', 'users': ['agentlans'], 'count': 1}]",2025-02-25 22:48:12,2025-02-25 22:48:12.851,[],/posts/Locutusque/390800785751051,2974,"{'language': 'en', 'probability': 0.9143835306167603}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,465391698094070,"[{'type': 'text', 'value': '🛫 AEX.ru Aviation News Dataset - ', 'raw': '🛫 AEX.ru Aviation News Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/aex'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/aex', 'raw': 'https://huggingface.co/datasets/nyuuzyou/aex'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key contents:', 'raw': 'Key contents:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 249,149 aviation news articles with full text', 'raw': '- 249,149 aviation news articles with full text'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Metadata including tags, image captions, and attributions', 'raw': '- Metadata including tags, image captions, and attributions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- URL information for reference', 'raw': '- URL information for reference'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Russian language content focusing on aviation topics', 'raw': '- Russian language content focusing on aviation topics'}]","🛫 AEX.ru Aviation News Dataset - https://huggingface.co/datasets/nyuuzyou/aex + +Key contents: +- 249,149 aviation news articles with full text +- Metadata including tags, image captions, and attributions +- URL information for reference +- Russian language content focusing on aviation topics",[],[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-02-25 21:30:26,2025-02-25 21:30:26.291,[],/posts/nyuuzyou/465391698094070,656,"{'language': 'en', 'probability': 0.6227507591247559}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,305640045790864,"[{'type': 'text', 'value': 'Dropping some of the custom fine-tunes based on SigLIP2, ', 'raw': 'Dropping some of the custom fine-tunes based on SigLIP2, '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'with a single/multi label classification problem type! 🌀🧤', 'raw': 'with a single/multi label classification problem type! 
🌀🧤'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- AI vs Deepfake vs Real : ', 'raw': '- AI vs Deepfake vs Real : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/AI-vs-Deepfake-vs-Real-Siglip2'}, 'url': 'https://huggingface.co/prithivMLmods/AI-vs-Deepfake-vs-Real-Siglip2', 'raw': 'https://huggingface.co/prithivMLmods/AI-vs-Deepfake-vs-Real-Siglip2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Deepfake Detect : ', 'raw': '- Deepfake Detect : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Deepfake-Detect-Siglip2'}, 'url': 'https://huggingface.co/prithivMLmods/Deepfake-Detect-Siglip2', 'raw': 'https://huggingface.co/prithivMLmods/Deepfake-Detect-Siglip2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Fire Detection : ', 'raw': '- Fire Detection : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Fire-Detection-Siglip2'}, 'url': 'https://huggingface.co/prithivMLmods/Fire-Detection-Siglip2', 'raw': 'https://huggingface.co/prithivMLmods/Fire-Detection-Siglip2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Deepfake Quality Assess : ', 'raw': '- Deepfake Quality Assess : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Deepfake-Quality-Assess-Siglip2'}, 'url': 'https://huggingface.co/prithivMLmods/Deepfake-Quality-Assess-Siglip2', 'raw': 'https://huggingface.co/prithivMLmods/Deepfake-Quality-Assess-Siglip2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Guard Against Unsafe Content : ', 'raw': '- Guard Against Unsafe Content : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Guard-Against-Unsafe-Content-Siglip2'}, 'url': 'https://huggingface.co/prithivMLmods/Guard-Against-Unsafe-Content-Siglip2', 'raw': 'https://huggingface.co/prithivMLmods/Guard-Against-Unsafe-Content-Siglip2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌠Collection : ', 'raw': '🌠Collection : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/siglip2-custom-67bcdb2de8fe96b99fb4e19e'}, 'url': 'https://huggingface.co/collections/prithivMLmods/siglip2-custom-67bcdb2de8fe96b99fb4e19e', 'raw': 'https://huggingface.co/collections/prithivMLmods/siglip2-custom-67bcdb2de8fe96b99fb4e19e'}]","Dropping some of the custom fine-tunes based on SigLIP2, +with a single/multi label classification problem type! 
🌀🧤 + +- AI vs Deepfake vs Real : https://huggingface.co/prithivMLmods/AI-vs-Deepfake-vs-Real-Siglip2 +- Deepfake Detect : https://huggingface.co/prithivMLmods/Deepfake-Detect-Siglip2 +- Fire Detection : https://huggingface.co/prithivMLmods/Fire-Detection-Siglip2 +- Deepfake Quality Assess : https://huggingface.co/prithivMLmods/Deepfake-Quality-Assess-Siglip2 +- Guard Against Unsafe Content : https://huggingface.co/prithivMLmods/Guard-Against-Unsafe-Content-Siglip2 + +🌠Collection : https://huggingface.co/collections/prithivMLmods/siglip2-custom-67bcdb2de8fe96b99fb4e19e","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/Dsi9G5UuYpgeGDdZSHEUy.png'}]",[],"[{'reaction': '🔥', 'users': ['victor', 'John6666', 'JRizzled', 'faruqhrp', 'Csplk', 'Ryukijano', 'shetumohanto', 'dantezxcd'], 'count': 8}, {'reaction': '👀', 'users': ['Ryukijano', 'dantezxcd'], 'count': 2}]",2025-02-25 20:33:15,2025-03-10 07:14:44.270,[],/posts/prithivMLmods/305640045790864,5906,"{'language': 'en', 'probability': 0.40235406160354614}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d50e9ef9cbfa798c590004/FlVe8chafigMfrPpMeJRL.jpeg,133.0,Jared Sulzdorf,jsulz,911431940353906,"[{'type': 'text', 'value': 'Time flies! ', 'raw': 'Time flies! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Six months after joining Hugging Face the Xet team is kicking off the first migrations from LFS to our storage for a number of repositories on the Hub. ', 'raw': 'Six months after joining Hugging Face the Xet team is kicking off the first migrations from LFS to our storage for a number of repositories on the Hub. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More on the nitty gritty details behind the migration soon, but here are the big takeaways:', 'raw': 'More on the nitty gritty details behind the migration soon, but here are the big takeaways:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🤖 We've successfully completed the first migrations from LFS -> Xet to test the infrastructure and prepare for a wider release"", 'raw': ""🤖 We've successfully completed the first migrations from LFS -> Xet to test the infrastructure and prepare for a wider release""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ No action on your part needed - you can work with a Xet-backed repo like any other repo on the Hub (for now - major improvements on their way!)', 'raw': '✅ No action on your part needed - you can work with a Xet-backed repo like any other repo on the Hub (for now - major improvements on their way!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👀 Keep an eye out for the Xet logo to see if a repo you know is on our infra! See the screenshots below to spot the difference 👇 ', 'raw': '👀 Keep an eye out for the Xet logo to see if a repo you know is on our infra! See the screenshots below to spot the difference 👇 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""⏩ ⏩ ⏩ Blazing uploads and downloads coming soon. 
We’re gearing up for a full integration with the Hub's Python library that will make building on the Hub faster than ever - special thanks to "", 'raw': ""⏩ ⏩ ⏩ Blazing uploads and downloads coming soon. We’re gearing up for a full integration with the Hub's Python library that will make building on the Hub faster than ever - special thanks to ""}, {'type': 'mention', 'user': 'celinah', 'raw': '@celinah'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'Wauplin', 'raw': '@Wauplin'}, {'type': 'text', 'value': ' for their assistance. ', 'raw': ' for their assistance. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎉 Want Early Access? If you’re curious and want to test it out the bleeding edge that will power the development experience on the Hub, we’d love to partner with you. Let me know!', 'raw': '🎉 Want Early Access? If you’re curious and want to test it out the bleeding edge that will power the development experience on the Hub, we’d love to partner with you. Let me know!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is the culmination of a lot of effort from the entire team. Big round of applause to ', 'raw': 'This is the culmination of a lot of effort from the entire team. Big round of applause to '}, {'type': 'mention', 'user': 'sirahd', 'raw': '@sirahd'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'brianronan', 'raw': '@brianronan'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'jgodlewski', 'raw': '@jgodlewski'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'hoytak', 'raw': '@hoytak'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'seanses', 'raw': '@seanses'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'assafvayner', 'raw': '@assafvayner'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'znation', 'raw': '@znation'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'saba9', 'raw': '@saba9'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'rajatarya', 'raw': '@rajatarya'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'port8080', 'raw': '@port8080'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'yuchenglow', 'raw': '@yuchenglow'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","Time flies! + +Six months after joining Hugging Face the Xet team is kicking off the first migrations from LFS to our storage for a number of repositories on the Hub. + +More on the nitty gritty details behind the migration soon, but here are the big takeaways: + +🤖 We've successfully completed the first migrations from LFS -> Xet to test the infrastructure and prepare for a wider release + +✅ No action on your part needed - you can work with a Xet-backed repo like any other repo on the Hub (for now - major improvements on their way!) + +👀 Keep an eye out for the Xet logo to see if a repo you know is on our infra! See the screenshots below to spot the difference 👇 + +⏩ ⏩ ⏩ Blazing uploads and downloads coming soon. We’re gearing up for a full integration with the Hub's Python library that will make building on the Hub faster than ever - special thanks to @celinah and @Wauplin for their assistance. + +🎉 Want Early Access? 
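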
If you’re curious and want to test it out the bleeding edge that will power the development experience on the Hub, we’d love to partner with you. Let me know! + +This is the culmination of a lot of effort from the entire team. Big round of applause to @sirahd @brianronan @jgodlewski @hoytak @seanses @assafvayner @znation @saba9 @rajatarya @port8080 @yuchenglow +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d50e9ef9cbfa798c590004/ZYQonQZ7_jvIezrXxjZY1.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d50e9ef9cbfa798c590004/qev8EQH5_BF_A4FDKAS4R.png'}]","[{'_id': '66abc1489654032803752328', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66abc1489654032803752328/TgdSHoXKcyy0uKzld7oyw.jpeg', 'fullname': 'Assaf Vayner', 'name': 'assafvayner', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29}, {'_id': '66abd28ea990031a0b030e3d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/qTU99LNGTcXKOg9Zm7Wj9.jpeg', 'fullname': 'Brian Ronan', 'name': 'brianronan', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 28}, {'_id': '6192895f3b8aa351a46fadfd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6192895f3b8aa351a46fadfd/2VifD-AAKYk24AUmfSr_X.png', 'fullname': 'Célina Hanouti', 'name': 'celinah', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 87}, {'_id': '659d8c4a23a7c54495b60d08', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659d8c4a23a7c54495b60d08/JVg9gPTFxzKqzz22eKXpS.jpeg', 'fullname': 'Hoyt Koepke', 'name': 'hoytak', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 25}, {'_id': '66aa6a88c230d76dbb63c019', 'avatarUrl': '/avatars/71a80bbe13552af2785130c0a46d57e5.svg', 'fullname': 'Joseph Godlewski', 'name': 'jgodlewski', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 26}, {'_id': '65e77dcc714ce98ddd82568e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65e77dcc714ce98ddd82568e/KhIkyM1Hc00t3zAqIaDoH.jpeg', 'fullname': 'Banerjee', 'name': 'port8080', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 28}, {'_id': '667c7853ed85453a28a05f19', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/EL0LDZAUUzRO1D95PQPn1.jpeg', 'fullname': 'Rajat Arya', 'name': 'rajatarya', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 33}, {'_id': '6675c969dbe75ba7865d3236', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6675c969dbe75ba7865d3236/-XZVfbWgPbaDffRV3sYJV.png', 'fullname': 'saba noorassa', 'name': 'saba9', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29}, {'_id': '64c3b82bfafa16b514253fd8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64c3b82bfafa16b514253fd8/bivgVJJMERqvS4CfdhDmO.jpeg', 'fullname': 'Di Xiao', 'name': 'seanses', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 25}, {'_id': '64beb7a2c733e8552ffd63b3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64beb7a2c733e8552ffd63b3/ZI_DOExd737quSFZLmx58.jpeg', 'fullname': 'Sam Horradarn', 'name': 
'sirahd', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 28}, {'_id': '6273f303f6d63a28483fde12', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1659336880158-6273f303f6d63a28483fde12.png', 'fullname': 'Lucain Pouget', 'name': 'Wauplin', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 231}, {'_id': '66ac094a8fc00b5c160d7da4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66ac094a8fc00b5c160d7da4/1-DnsQ0zlyTA-18bncHbt.jpeg', 'fullname': 'yuchenglow', 'name': 'yuchenglow', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 50}, {'_id': '6351244bce7cf1fe8a831d16', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6351244bce7cf1fe8a831d16/eNUui07_Yt7IDoyzNBsMN.jpeg', 'fullname': 'Zach Nation', 'name': 'znation', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 35}]","[{'reaction': '🚀', 'users': ['Wauplin', 'fffiloni', 'Aurelien-Morgan', 'Tonic', 'John6666', 'assafvayner', 'seanses', 'julien-c', 'stefan-it', 'Azathothas', 'Samoed', 'dantezxcd'], 'count': 12}, {'reaction': '❤️', 'users': ['Wauplin', 'prithivMLmods', 'Aurelien-Morgan', 'Tonic', 'assafvayner', 'seanses', 'julien-c', 'Azathothas'], 'count': 8}, {'reaction': '🔥', 'users': ['Wauplin', 'Tonic', 'assafvayner', 'seanses', 'minpeter', 'julien-c'], 'count': 6}]",2025-02-21 03:32:47,2025-02-21 06:56:50.551,"[{'_id': '6273f303f6d63a28483fde12', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1659336880158-6273f303f6d63a28483fde12.png', 'fullname': 'Lucain Pouget', 'name': 'Wauplin', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 231, 'isFollowing': False}]",/posts/jsulz/911431940353906,3687,"{'language': 'en', 'probability': 0.9006760716438293}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64d1129297ca59bcf7458d07/54J83Jv0Mk2CEX73Bx1_g.jpeg,81.0,Manav Majumdar,smirki,677152088320179,"[{'type': 'text', 'value': 'Please join my discord! I can answer any questions, talk about news and updates, or even just talk about ai, and take your feedback!', 'raw': 'Please join my discord! I can answer any questions, talk about news and updates, or even just talk about ai, and take your feedback!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://discord.gg/DkzMzwBTaw', 'raw': 'https://discord.gg/DkzMzwBTaw'}]","Please join my discord! I can answer any questions, talk about news and updates, or even just talk about ai, and take your feedback! 
+https://discord.gg/DkzMzwBTaw",[],[],"[{'reaction': '🚀', 'users': ['dantezxcd'], 'count': 1}]",2025-02-21 02:14:03,2025-03-03 11:05:08.839,"[{'_id': '66891ccbbdc7e8332b4609ac', 'avatarUrl': '/avatars/dfc22a227436e8a24f319783ba2060eb.svg', 'fullname': 'Bhupesh Gupta', 'name': 'bhupesh-sf', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/smirki/677152088320179,962,"{'language': 'en', 'probability': 0.9478999972343445}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6549bb4c08775ce78e55939d/fPRRPbl0EA7rTENq4FhKv.png,3.0,Caelan Cooper,caelancooper,548300058802799,"[{'type': 'text', 'value': 'Hey Huggingface Community,', 'raw': 'Hey Huggingface Community,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm just starting my journey. I'm here to learn and contribute as much as I can to the AI community. What happened with one of my models was I left the security permissions open for people to commit changes and contribute to the model in good faith and the opposite happened."", 'raw': ""I'm just starting my journey. I'm here to learn and contribute as much as I can to the AI community. What happened with one of my models was I left the security permissions open for people to commit changes and contribute to the model in good faith and the opposite happened.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm open to all feedback you may have on my future projects. Let's keep it collegial and try to make something amazing. I always strive to make situations a win for all parties involved and would love to collaborate with anybody who's interested in innovation, optimization and new use cases for AI."", 'raw': ""I'm open to all feedback you may have on my future projects. Let's keep it collegial and try to make something amazing. I always strive to make situations a win for all parties involved and would love to collaborate with anybody who's interested in innovation, optimization and new use cases for AI.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks Everyone,', 'raw': 'Thanks Everyone,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Caelan', 'raw': 'Caelan'}]","Hey Huggingface Community, + +I'm just starting my journey. I'm here to learn and contribute as much as I can to the AI community. What happened with one of my models was I left the security permissions open for people to commit changes and contribute to the model in good faith and the opposite happened. + +I'm open to all feedback you may have on my future projects. Let's keep it collegial and try to make something amazing. I always strive to make situations a win for all parties involved and would love to collaborate with anybody who's interested in innovation, optimization and new use cases for AI. 
+ +Thanks Everyone, +Caelan",[],[],"[{'reaction': '👍', 'users': ['Quazim0t0', 'JLouisBiz', 'dantezxcd'], 'count': 3}, {'reaction': '👀', 'users': ['John6666', 'JLouisBiz'], 'count': 2}]",2025-02-21 02:09:00,2025-02-21 02:09:00.919,[],/posts/caelancooper/548300058802799,961,"{'language': 'en', 'probability': 0.9766660332679749}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,561059857144028,"[{'type': 'text', 'value': 'IDM VTON : Virtual Try On APP Automatic Installers for Windows, RunPod, Massed Compute and a free Kaggle Account notebook Published - Can transfer objects too', 'raw': 'IDM VTON : Virtual Try On APP Automatic Installers for Windows, RunPod, Massed Compute and a free Kaggle Account notebook Published - Can transfer objects too'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Installers & APP', 'raw': 'Installers & APP'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1-Click installers for Windows, RunPod, Massed Compute and a free Kaggle account notebook in below link:', 'raw': '1-Click installers for Windows, RunPod, Massed Compute and a free Kaggle account notebook in below link:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.patreon.com/posts/122718239', 'raw': 'https://www.patreon.com/posts/122718239'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Features', 'raw': 'Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Seamlessly install on Windows, RunPod, Massed Compute and on Kaggle with just 1-click into a Python 3.10 VENV', 'raw': 'Seamlessly install on Windows, RunPod, Massed Compute and on Kaggle with just 1-click into a Python 3.10 VENV'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our APP has so many extra features', 'raw': 'Our APP has so many extra features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Can perfectly handle any resolution and aspect ratio images', 'raw': 'Can perfectly handle any resolution and aspect ratio images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can perfectly manually mask via latest version of Gradio and properly working image editor', 'raw': 'You can perfectly manually mask via latest version of Gradio and properly working image editor'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Supports 4-bit, 8-bit quantization + CPU offloading for lower VRAM GPUs', 'raw': 'Supports 4-bit, 8-bit quantization + CPU offloading for lower VRAM GPUs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All generated images are also automatically saved', 'raw': 'All generated images are also automatically saved'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can also generate more than 1 image like 10 images as batch generation with order', 'raw': 'You can also generate more than 1 image like 10 images as batch generation with order'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Official repo : ', 'raw': 'Official repo : '}, {'type': 'link', 'href': 'https://idm-vton.github.io/', 
'raw': 'https://idm-vton.github.io/'}]","IDM VTON : Virtual Try On APP Automatic Installers for Windows, RunPod, Massed Compute and a free Kaggle Account notebook Published - Can transfer objects too + +Installers & APP +1-Click installers for Windows, RunPod, Massed Compute and a free Kaggle account notebook in below link: + +https://www.patreon.com/posts/122718239 + +Features + +Seamlessly install on Windows, RunPod, Massed Compute and on Kaggle with just 1-click into a Python 3.10 VENV + +Our APP has so many extra features + +Can perfectly handle any resolution and aspect ratio images + +You can perfectly manually mask via latest version of Gradio and properly working image editor + +Supports 4-bit, 8-bit quantization + CPU offloading for lower VRAM GPUs +All generated images are also automatically saved + +You can also generate more than 1 image like 10 images as batch generation with order + +Official repo : https://idm-vton.github.io/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/GYtUSKXHk5MQgbwSv4BDS.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/7fB8IYN034FCEnwcBTwwm.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/q8akDae4OBdjFqTvOqCSW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/mynGgubgXcU8dXH17uxZ8.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/65T5QTeiE2iIzKoVw21Gl.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/HwxHkLCN4CAD199d5ibmr.png'}]",[],"[{'reaction': '❤️', 'users': ['MonsterMMORPG', 'swomfire', 'nlper2022', 'roland0822', 'Daevalus', 'dantezxcd'], 'count': 6}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'John6666', 'dantezxcd'], 'count': 3}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'aydeniz'], 'count': 2}, {'reaction': '🔥', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-02-20 19:48:49,2025-02-22 13:04:10.494,"[{'_id': '678717a113da730e75104d2c', 'avatarUrl': '/avatars/ccfde242360af9c9db84605ba8bc4402.svg', 'fullname': 'lin huan', 'name': 'lhwalq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6538e1d056c9b35961defc7d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/Ug631YAvSG8Hk5ydvawrp.jpeg', 'fullname': 'Usama Ahmed', 'name': 'usamaahmedkhan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/MonsterMMORPG/561059857144028,2364,"{'language': 'en', 'probability': 0.8558440208435059}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,189065722993769,"[{'type': 'text', 'value': '🚀🎭🌟 New Research Alert - WACV 2025 (Avatars Collection)! 🌟🎭🚀', 'raw': '🚀🎭🌟 New Research Alert - WACV 2025 (Avatars Collection)! 
🌟🎭🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: EmoVOCA: Speech-Driven Emotional 3D Talking Heads 🔝', 'raw': '📄 Title: EmoVOCA: Speech-Driven Emotional 3D Talking Heads 🔝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: EmoVOCA is a data-driven method for generating emotional 3D talking heads by combining speech-driven lip movements with expressive facial dynamics. This method has been developed to overcome the limitations of corpora and to achieve state-of-the-art animation quality.', 'raw': '📝 Description: EmoVOCA is a data-driven method for generating emotional 3D talking heads by combining speech-driven lip movements with expressive facial dynamics. This method has been developed to overcome the limitations of corpora and to achieve state-of-the-art animation quality.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: ', 'raw': '👥 Authors: '}, {'type': 'mention', 'user': 'FedeNoce', 'raw': '@FedeNoce'}, {'type': 'text', 'value': ', Claudio Ferrari, and Stefano Berretti', 'raw': ', Claudio Ferrari, and Stefano Berretti'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: WACV, 28 Feb – 4 Mar, 2025 | Arizona, USA 🇺🇸', 'raw': '📅 Conference: WACV, 28 Feb – 4 Mar, 2025 | Arizona, USA 🇺🇸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2403.12886', 'raw': 'https://arxiv.org/abs/2403.12886'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Page: ', 'raw': '🌐 Github Page: '}, {'type': 'link', 'href': 'https://fedenoce.github.io/emovoca/', 'raw': 'https://fedenoce.github.io/emovoca/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Repository: ', 'raw': '📁 Repository: '}, {'type': 'link', 'href': 'https://github.com/miccunifi/EmoVOCA', 'raw': 'https://github.com/miccunifi/EmoVOCA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 CVPR-2023-24-Papers: ', 'raw': '🚀 CVPR-2023-24-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/CVPR-2023-24-Papers', 'raw': 'https://github.com/DmitryRyumin/CVPR-2023-24-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 WACV-2024-Papers: ', 'raw': '🚀 WACV-2024-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/WACV-2024-Papers', 'raw': 'https://github.com/DmitryRyumin/WACV-2024-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 ICCV-2023-Papers: ', 'raw': '🚀 ICCV-2023-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/ICCV-2023-Papers', 'raw': 'https://github.com/DmitryRyumin/ICCV-2023-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 
'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #EmoVOCA #3DAnimation #TalkingHeads #SpeechDriven #FacialExpressions #MachineLearning #ComputerVision #ComputerGraphics #DeepLearning #AI #WACV2024', 'raw': '🔍 Keywords: #EmoVOCA #3DAnimation #TalkingHeads #SpeechDriven #FacialExpressions #MachineLearning #ComputerVision #ComputerGraphics #DeepLearning #AI #WACV2024'}]","🚀🎭🌟 New Research Alert - WACV 2025 (Avatars Collection)! 🌟🎭🚀 +📄 Title: EmoVOCA: Speech-Driven Emotional 3D Talking Heads 🔝 + +📝 Description: EmoVOCA is a data-driven method for generating emotional 3D talking heads by combining speech-driven lip movements with expressive facial dynamics. This method has been developed to overcome the limitations of corpora and to achieve state-of-the-art animation quality. + +👥 Authors: @FedeNoce, Claudio Ferrari, and Stefano Berretti + +📅 Conference: WACV, 28 Feb – 4 Mar, 2025 | Arizona, USA 🇺🇸 + +📄 Paper: https://arxiv.org/abs/2403.12886 + +🌐 Github Page: https://fedenoce.github.io/emovoca/ +📁 Repository: https://github.com/miccunifi/EmoVOCA + +🚀 CVPR-2023-24-Papers: https://github.com/DmitryRyumin/CVPR-2023-24-Papers + +🚀 WACV-2024-Papers: https://github.com/DmitryRyumin/WACV-2024-Papers + +🚀 ICCV-2023-Papers: https://github.com/DmitryRyumin/ICCV-2023-Papers + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +🔍 Keywords: #EmoVOCA #3DAnimation #TalkingHeads #SpeechDriven #FacialExpressions #MachineLearning #ComputerVision #ComputerGraphics #DeepLearning #AI #WACV2024","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/fDPEvz9YcZN5Ifh0sphCF.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/LSVk64J_z-PA99TCP5ELo.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/KZ7mnCmSxwMrR-coXNefK.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/d29z0jriuvFmtdFMU73K7.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Wdgg78VqVYTsZQO-lmGW6.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/HLwsYA7doyHncxeta8zPE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Ho8DJoDwIKBvcKnQvuDh4.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 
'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}, {'_id': '66eaa2ecfcc0bc550f6298df', 'avatarUrl': '/avatars/30a51a84e9acb03fba02f4af7c2d6b01.svg', 'fullname': 'Federico Nocentini', 'name': 'FedeNoce', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '🔥', 'users': ['DmitryRyumin', 'FedeNoce', 'anuragv04', 'prithivMLmods', 'Enzo1998', 'Sansara-a', 'John6666', 'Fishtiks', 'Pragmatica', 'dantezxcd'], 'count': 10}, {'reaction': '👍', 'users': ['fffiloni', 'Th-Sch1979', 'jameshuntercarter', 'Sansara-a', 'Damini0001', 'dantezxcd'], 'count': 6}, {'reaction': '😎', 'users': ['profelius'], 'count': 1}, {'reaction': '👀', 'users': ['aydeniz'], 'count': 1}]",2025-02-20 19:28:23,2025-02-21 13:23:13.444,"[{'_id': '65ec4d3cca6ebc7a34f0b8c3', 'avatarUrl': '/avatars/cad542c7f4637fc09d44e6810c79c31d.svg', 'fullname': 'Alexis Robin', 'name': 'Sansara-a', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/DmitryRyumin/189065722993769,3945,"{'language': 'en', 'probability': 0.7083001732826233}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,304251643027182,"[{'type': 'text', 'value': '🌐 Fandom.com Community Dataset - ', 'raw': '🌐 Fandom.com Community Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/fandom'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/fandom', 'raw': 'https://huggingface.co/datasets/nyuuzyou/fandom'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A comprehensive collection of 7.04M wiki pages from Fandom.com communities featuring:', 'raw': 'A comprehensive collection of 7.04M wiki pages from Fandom.com communities featuring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Full article content and metadata from current pages', 'raw': '- Full article content and metadata from current pages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Rich structural data including templates, categories, and links', 'raw': '- Rich structural data including templates, categories, and links'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multilingual content across 40+ languages', 'raw': '- Multilingual content across 40+ languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Complete metadata including titles and section structure', 'raw': '- Complete metadata including titles and section structure'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Content is available under CC-BY-SA 3.0 license, allowing reuse with attribution and share-alike requirements.', 'raw': 'Content is available under CC-BY-SA 3.0 license, allowing reuse with attribution and share-alike requirements.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key contents:', 'raw': 'Key contents:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 7.04M wiki articles with full text', 'raw': '- 7.04M wiki articles with full text'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Metadata including templates, categories, sections', 'raw': '- Metadata including templates, categories, sections'}, 
{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Internal and external link information ', 'raw': '- Internal and external link information '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multi-language support including major world languages', 'raw': '- Multi-language support including major world languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The dataset provides a valuable resource for:', 'raw': 'The dataset provides a valuable resource for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Text generation and classification tasks', 'raw': '- Text generation and classification tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Topic modeling and categorization', 'raw': '- Topic modeling and categorization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cross-language information retrieval', 'raw': '- Cross-language information retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Wiki structure analysis', 'raw': '- Wiki structure analysis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All content comes from public Fandom.com community wikis as of February 2025 and maintains original CC-BY-SA 3.0 licensing.', 'raw': 'All content comes from public Fandom.com community wikis as of February 2025 and maintains original CC-BY-SA 3.0 licensing.'}]","🌐 Fandom.com Community Dataset - https://huggingface.co/datasets/nyuuzyou/fandom + +A comprehensive collection of 7.04M wiki pages from Fandom.com communities featuring: +- Full article content and metadata from current pages +- Rich structural data including templates, categories, and links +- Multilingual content across 40+ languages +- Complete metadata including titles and section structure + +Content is available under CC-BY-SA 3.0 license, allowing reuse with attribution and share-alike requirements. + +Key contents: +- 7.04M wiki articles with full text +- Metadata including templates, categories, sections +- Internal and external link information +- Multi-language support including major world languages + +The dataset provides a valuable resource for: +- Text generation and classification tasks +- Topic modeling and categorization +- Cross-language information retrieval +- Wiki structure analysis + +All content comes from public Fandom.com community wikis as of February 2025 and maintains original CC-BY-SA 3.0 licensing.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'krinal', 'ThijsL202', 'dantezxcd'], 'count': 4}]",2025-02-20 18:22:31,2025-02-20 18:22:31.038,[],/posts/nyuuzyou/304251643027182,1320,"{'language': 'en', 'probability': 0.7404924631118774}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,427943784566900,"[{'type': 'text', 'value': 'Hacked together a way to log trl GRPO training completions to a 🤗 dataset repo. This allows you to:', 'raw': 'Hacked together a way to log trl GRPO training completions to a 🤗 dataset repo. 
This allows you to:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Track rewards from multiple reward functions', 'raw': '- Track rewards from multiple reward functions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Treat the completion and rewards from training as a ""proper"" dataset and do EDA ', 'raw': '- Treat the completion and rewards from training as a ""proper"" dataset and do EDA '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Share results for open science', 'raw': '- Share results for open science'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The implementation is super hacky, but I'm curious if people would find this useful."", 'raw': ""The implementation is super hacky, but I'm curious if people would find this useful.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To push completions to the Hub, you just need two extra parameters:', 'raw': 'To push completions to the Hub, you just need two extra parameters:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ""log_completions=True\nlog_completions_hub_repo='your-username/repo-name'"", 'raw': ""```\nlog_completions=True\nlog_completions_hub_repo='your-username/repo-name'\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Example dataset: ', 'raw': 'Example dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'davanstrien/test-logs'}, 'url': 'https://huggingface.co/datasets/davanstrien/test-logs', 'raw': 'https://huggingface.co/datasets/davanstrien/test-logs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Colab: ', 'raw': 'Colab: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1wzBFPVthRYYTp-mEYlznLg_e_0Za1M3g', 'raw': 'https://colab.research.google.com/drive/1wzBFPVthRYYTp-mEYlznLg_e_0Za1M3g'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Hacked together a way to log trl GRPO training completions to a 🤗 dataset repo. This allows you to: + +- Track rewards from multiple reward functions +- Treat the completion and rewards from training as a ""proper"" dataset and do EDA +- Share results for open science + +The implementation is super hacky, but I'm curious if people would find this useful. + +To push completions to the Hub, you just need two extra parameters: + +``` +log_completions=True +log_completions_hub_repo='your-username/repo-name' +``` +Example dataset: https://huggingface.co/datasets/davanstrien/test-logs +Colab: https://colab.research.google.com/drive/1wzBFPVthRYYTp-mEYlznLg_e_0Za1M3g + +",[],[],"[{'reaction': '👍', 'users': ['John6666', 'owao', 'mmfarabi', 'dantezxcd'], 'count': 4}, {'reaction': '🧠', 'users': ['prithivMLmods', 'dantezxcd'], 'count': 2}]",2025-02-20 17:36:14,2025-02-20 17:36:24.298,[],/posts/davanstrien/427943784566900,2655,"{'language': 'en', 'probability': 0.8256903290748596}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/ux7NRFAbgnlIVNh-Cbv9s.png,73.0,Lucie-Aimée Kaffee,frimelle,972648838018664,"[{'type': 'text', 'value': 'What’s in a name? More than you might think, especially for AI.', 'raw': 'What’s in a name? 
More than you might think, especially for AI.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Whenever I introduce myself, people often start speaking French to me, even though my French is très basic. It turns out that AI systems do something similar:', 'raw': 'Whenever I introduce myself, people often start speaking French to me, even though my French is très basic. It turns out that AI systems do something similar:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Large language models infer cultural identity from names, shaping their responses based on presumed backgrounds. But is this helpful personalization or a reinforcement of stereotypes?', 'raw': 'Large language models infer cultural identity from names, shaping their responses based on presumed backgrounds. But is this helpful personalization or a reinforcement of stereotypes?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In our latest paper, we explored this question by testing DeepSeek, Llama, Aya, Mistral-Nemo, and GPT-4o-mini on how they associate names with cultural identities. We analysed 900 names from 30 cultures and found strong assumptions baked into AI responses: some cultures were overrepresented, while others barely registered.', 'raw': 'In our latest paper, we explored this question by testing DeepSeek, Llama, Aya, Mistral-Nemo, and GPT-4o-mini on how they associate names with cultural identities. We analysed 900 names from 30 cultures and found strong assumptions baked into AI responses: some cultures were overrepresented, while others barely registered.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For example, a name like ""Jun"" often triggered Japan-related responses, while ""Carlos"" was linked primarily to Mexico, even though these names exist in multiple countries. Meanwhile, names from places like Ireland led to more generic answers, suggesting weaker associations in the training data.', 'raw': 'For example, a name like ""Jun"" often triggered Japan-related responses, while ""Carlos"" was linked primarily to Mexico, even though these names exist in multiple countries. Meanwhile, names from places like Ireland led to more generic answers, suggesting weaker associations in the training data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This has real implications for AI fairness: How should AI systems personalize without stereotyping? Should they adapt at all based on a name?', 'raw': 'This has real implications for AI fairness: How should AI systems personalize without stereotyping? Should they adapt at all based on a name?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Work with some of my favourite researchers: ', 'raw': 'Work with some of my favourite researchers: '}, {'type': 'mention', 'user': 'sidicity', 'raw': '@sidicity'}, {'type': 'text', 'value': ' Arnav Arora and ', 'raw': ' Arnav Arora and '}, {'type': 'mention', 'user': 'IAugenstein', 'raw': '@IAugenstein'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the full paper here: ', 'raw': 'Read the full paper here: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.11995'}, 'url': 'https://huggingface.co/papers/2502.11995', 'raw': 'https://huggingface.co/papers/2502.11995', 'label': 'Presumed Cultural Identity: How Names Shape LLM Responses (2502.11995)'}, {'type': 'new_line', 'raw': '\n'}]","What’s in a name? 
More than you might think, especially for AI. +Whenever I introduce myself, people often start speaking French to me, even though my French is très basic. It turns out that AI systems do something similar: +Large language models infer cultural identity from names, shaping their responses based on presumed backgrounds. But is this helpful personalization or a reinforcement of stereotypes? +In our latest paper, we explored this question by testing DeepSeek, Llama, Aya, Mistral-Nemo, and GPT-4o-mini on how they associate names with cultural identities. We analysed 900 names from 30 cultures and found strong assumptions baked into AI responses: some cultures were overrepresented, while others barely registered. +For example, a name like ""Jun"" often triggered Japan-related responses, while ""Carlos"" was linked primarily to Mexico, even though these names exist in multiple countries. Meanwhile, names from places like Ireland led to more generic answers, suggesting weaker associations in the training data. +This has real implications for AI fairness: How should AI systems personalize without stereotyping? Should they adapt at all based on a name? +Work with some of my favourite researchers: @sidicity Arnav Arora and @IAugenstein +Read the full paper here: https://huggingface.co/papers/2502.11995 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6531310497d7f1b4a083de7b/Dg0u5mYfb0a-STlyJajSh.png'}]","[{'_id': '608918b7df398c3b285ce960', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1621507769190-608918b7df398c3b285ce960.jpeg', 'fullname': 'Isabelle Augenstein', 'name': 'IAugenstein', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10}, {'_id': '60c50f18754747f54fa37114', 'avatarUrl': '/avatars/648ae58b81806dbd93a68546666047e3.svg', 'fullname': 'Siddhesh', 'name': 'sidicity', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}]","[{'reaction': '🧠', 'users': ['IAugenstein', 'John6666', 'dantezxcd'], 'count': 3}, {'reaction': '🔥', 'users': ['IAugenstein', 'Fishtiks', 'dantezxcd'], 'count': 3}, {'reaction': '❤️', 'users': ['IAugenstein', 'benjamin-paine'], 'count': 2}]",2025-02-20 14:01:59,2025-02-20 14:01:59.759,[],/posts/frimelle/972648838018664,2450,"{'language': 'en', 'probability': 0.9421620965003967}",0 +/avatars/2e88e32ac0d45a1c624026e497eb00b3.svg,10.0,wenhua cheng,wenhuach,785576510657274,"[{'type': 'text', 'value': 'OPEA Space has released several quantized DeepSeek models, including INT2. Explore them here', 'raw': 'OPEA Space has released several quantized DeepSeek models, including INT2. Explore them here'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'OPEA/deepseek-6784a012d91191015587584a'}, 'url': 'https://huggingface.co/collections/OPEA/deepseek-6784a012d91191015587584a', 'raw': 'https://huggingface.co/collections/OPEA/deepseek-6784a012d91191015587584a'}]","OPEA Space has released several quantized DeepSeek models, including INT2. 
Explore them here +https://huggingface.co/collections/OPEA/deepseek-6784a012d91191015587584a",[],[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-02-20 13:17:31,2025-02-20 13:17:31.036,[],/posts/wenhuach/785576510657274,746,"{'language': 'en', 'probability': 0.9194719791412354}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/679b8aaaa4583bfdb4a89861/zS972g-ciRUyBPjrBhlxH.png,12.0,Lun Zima,Lunzima,907245104546728,"[{'type': 'text', 'value': '🚀 Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v5 now excels in reasoning and coding, built on top of v4 which improved Chinese capabilities through SFT.', 'raw': '🚀 Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v5 now excels in reasoning and coding, built on top of v4 which improved Chinese capabilities through SFT.'}]","🚀 Lunzima/NQLSG-Qwen2.5-14B-MegaFusion-v5 now excels in reasoning and coding, built on top of v4 which improved Chinese capabilities through SFT.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-02-20 12:32:56,2025-02-20 12:32:56.179,[],/posts/Lunzima/907245104546728,678,"{'language': 'en', 'probability': 0.9027360081672668}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6459fa0f5b3111fbe83286e1/E6Buqu8Wd9WmIHKOCZXCc.jpeg,235.0,Louis Brulé Naudet,louisbrulenaudet,828105702758595,"[{'type': 'text', 'value': 'I am pleased to introduce my first project built upon Hugging Face’s smolagents framework, integrated with Alpaca for financial market analysis automation 🦙🤗', 'raw': 'I am pleased to introduce my first project built upon Hugging Face’s smolagents framework, integrated with Alpaca for financial market analysis automation 🦙🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The project implements technical indicators such as the Relative Strength Index (RSI) and Bollinger Bands to provide momentum and volatility analysis. Market data is retrieved through the Alpaca API, enabling access to historical price information across various timeframes.', 'raw': 'The project implements technical indicators such as the Relative Strength Index (RSI) and Bollinger Bands to provide momentum and volatility analysis. 
Market data is retrieved through the Alpaca API, enabling access to historical price information across various timeframes.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI-powered insights are generated using Hugging Face’s inference API, facilitating the analysis of market trends through natural language processing with DuckDuckGo search integration for real-time sentiment analysis based on financial news 🦆', 'raw': 'AI-powered insights are generated using Hugging Face’s inference API, facilitating the analysis of market trends through natural language processing with DuckDuckGo search integration for real-time sentiment analysis based on financial news 🦆'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the GitHub project: ', 'raw': 'Link to the GitHub project: '}, {'type': 'link', 'href': 'https://github.com/louisbrulenaudet/agentic-market-tool', 'raw': 'https://github.com/louisbrulenaudet/agentic-market-tool'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","I am pleased to introduce my first project built upon Hugging Face’s smolagents framework, integrated with Alpaca for financial market analysis automation 🦙🤗 + +The project implements technical indicators such as the Relative Strength Index (RSI) and Bollinger Bands to provide momentum and volatility analysis. Market data is retrieved through the Alpaca API, enabling access to historical price information across various timeframes. + +AI-powered insights are generated using Hugging Face’s inference API, facilitating the analysis of market trends through natural language processing with DuckDuckGo search integration for real-time sentiment analysis based on financial news 🦆 + +Link to the GitHub project: https://github.com/louisbrulenaudet/agentic-market-tool + +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6459fa0f5b3111fbe83286e1/wOxCIziTQrKrlw-lPtv-x.mp4'}]",[],"[{'reaction': '👍', 'users': ['Discomix', 'srivatsa92', 'John6666', 'Laikokwei', 'madoss', 'krinal', 'Nicholuas', 'Sergidev', 'Tonic'], 'count': 9}, {'reaction': '🤗', 'users': ['prithivMLmods', 'benhaotang', 'Tonic'], 'count': 3}, {'reaction': '😎', 'users': ['kchilala', 'dantezxcd'], 'count': 2}]",2025-02-16 17:35:48,2025-02-16 17:35:48.861,[],/posts/louisbrulenaudet/828105702758595,3464,"{'language': 'en', 'probability': 0.8249810338020325}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg,29.0,Sk md saad amin,Reality123b,533143502736808,"[{'type': 'text', 'value': ""I'm working on a groundbreaking AI technology that's kind of like openai's deep research but better. I'm not disclosing everything now. should I open source it? (edit: it takes 1000s for one task forgive me as i dont have some kind of huge server for that) edit2: this is not a clickbait"", 'raw': ""I'm working on a groundbreaking AI technology that's kind of like openai's deep research but better. I'm not disclosing everything now. should I open source it? (edit: it takes 1000s for one task forgive me as i dont have some kind of huge server for that) edit2: this is not a clickbait""}]",I'm working on a groundbreaking AI technology that's kind of like openai's deep research but better. I'm not disclosing everything now. should I open source it? 
(edit: it takes 1000s for one task forgive me as i dont have some kind of huge server for that) edit2: this is not a clickbait,[],[],"[{'reaction': '👍', 'users': ['masolo', 'MrShubhamSinghal', 'darkvale', 'saq1b', 'Al-Mahi', 'Shakil2448868', 'MIRAACLE', 'John6666', 'zaravelisa', 'wangbinyq', 'Bananenman2002', 'a2post', 'nicoboss', 'Boxy32', 'mexicanamerican', 'dantezxcd'], 'count': 16}]",2025-02-16 13:21:08,2025-02-22 11:24:41.959,"[{'_id': '6786e3508511628c9fa21ca7', 'avatarUrl': '/avatars/628014396a7f7a3d1f2900c4fd46b818.svg', 'fullname': 'Kaif shekh', 'name': 'Kaif77', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '63de560a15266dd945f209ca', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63de560a15266dd945f209ca/PeZf3IF-x7Qh8OcnKH12R.png', 'fullname': 'MrDragonFox', 'name': 'MrDragonFox', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 95, 'isFollowing': False}, {'_id': '6392e8904bca25f8ee0e81fa', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6392e8904bca25f8ee0e81fa/rs2V4KqNnr_gJ_HITRZUs.png', 'fullname': 'Low IQ Gen AI', 'name': 'fhai50032', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 27, 'isFollowing': False}, {'_id': '66fe8fb27d722f0879b4631f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg', 'fullname': 'Sk md saad amin', 'name': 'Reality123b', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}, {'_id': '67b8e0d79107c46e941a3fc8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/kjcJuaIcvP-1x4Hmf6LMt.png', 'fullname': 'Karen Akers', 'name': 'karenny', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/Reality123b/533143502736808,3783,"{'language': 'en', 'probability': 0.965589702129364}",7 +https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,134685305854108,"[{'type': 'text', 'value': '8 New Applications of Test-Time Scaling ', 'raw': '8 New Applications of Test-Time Scaling '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We\'ve noticed a huge interest in test-time scaling (TTS), so we decided to explore this concept further. Test-time compute (TTC) refers to the amount of computational power used by an AI model when generating a response. Many researchers are now focused on scaling TTC, as it enables slow, deep ""thinking"" and step-by-step reasoning, which improves overall models\' performance.', 'raw': 'We\'ve noticed a huge interest in test-time scaling (TTS), so we decided to explore this concept further. Test-time compute (TTC) refers to the amount of computational power used by an AI model when generating a response. 
Many researchers are now focused on scaling TTC, as it enables slow, deep ""thinking"" and step-by-step reasoning, which improves overall models\' performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here are 8 fresh studies on test-time scaling:', 'raw': 'Here are 8 fresh studies on test-time scaling:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. ', 'raw': '1. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.05171'}, 'url': 'https://huggingface.co/papers/2502.05171', 'raw': 'https://huggingface.co/papers/2502.05171', 'label': 'Scaling up Test-Time Compute with Latent Reasoning: A Recurrent Depth\n Approach (2502.05171)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Introduces an LM that scales TTC by reasoning in latent space instead of generating more tokens with no special training. Here, a recurrent block to processes information iteratively.', 'raw': 'Introduces an LM that scales TTC by reasoning in latent space instead of generating more tokens with no special training. Here, a recurrent block to processes information iteratively.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. ', 'raw': '2. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.04728'}, 'url': 'https://huggingface.co/papers/2502.04728', 'raw': 'https://huggingface.co/papers/2502.04728', 'label': 'Generating Symbolic World Models via Test-time Scaling of Large Language\n Models (2502.04728)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Shows how TTS is applied to enhance model's Planning Domain Definition Language (PDDL) reasoning capabilities, which can be used to generate a symbolic world model."", 'raw': ""Shows how TTS is applied to enhance model's Planning Domain Definition Language (PDDL) reasoning capabilities, which can be used to generate a symbolic world model.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. ', 'raw': '3. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.06703'}, 'url': 'https://huggingface.co/papers/2502.06703', 'raw': 'https://huggingface.co/papers/2502.06703', 'label': 'Can 1B LLM Surpass 405B LLM? Rethinking Compute-Optimal Test-Time\n Scaling (2502.06703)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Analyzes optimal TTS strategies and shows how small models can outperform much larger ones.', 'raw': 'Analyzes optimal TTS strategies and shows how small models can outperform much larger ones.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. ', 'raw': '4. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.04128'}, 'url': 'https://huggingface.co/papers/2502.04128', 'raw': 'https://huggingface.co/papers/2502.04128', 'label': 'Llasa: Scaling Train-Time and Inference-Time Compute for Llama-based\n Speech Synthesis (2502.04128)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Shows how TTS improves expressiveness, timbre consistency and accuracy in speech synthesis with Llasa framework. 
It also dives into benefits of scaling train-time compute.', 'raw': 'Shows how TTS improves expressiveness, timbre consistency and accuracy in speech synthesis with Llasa framework. It also dives into benefits of scaling train-time compute.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. ', 'raw': '5. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.07154'}, 'url': 'https://huggingface.co/papers/2502.07154', 'raw': 'https://huggingface.co/papers/2502.07154', 'label': 'Rethinking Fine-Tuning when Scaling Test-Time Compute: Limiting\n Confidence Improves Mathematical Reasoning (2502.07154)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Suggests a modified training loss for better reasoning of LLMs when scaling TTC.', 'raw': 'Suggests a modified training loss for better reasoning of LLMs when scaling TTC.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6. ', 'raw': '6. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.05078'}, 'url': 'https://huggingface.co/papers/2502.05078', 'raw': 'https://huggingface.co/papers/2502.05078', 'label': 'Adaptive Graph of Thoughts: Test-Time Adaptive Reasoning Unifying Chain,\n Tree, and Graph Structures (2502.05078)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Unifies the strengths of chain, tree, and graph paradigms into one framework that expands reasoning only on necessary subproblems.', 'raw': 'Unifies the strengths of chain, tree, and graph paradigms into one framework that expands reasoning only on necessary subproblems.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7. ', 'raw': '7. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2502.01839'}, 'url': 'https://huggingface.co/papers/2502.01839', 'raw': 'https://huggingface.co/papers/2502.01839', 'label': 'Sample, Scrutinize and Scale: Effective Inference-Time Search by Scaling\n Verification (2502.01839)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explores scaling trends of self-verification and how to improve its capabilities with TTC.', 'raw': 'Explores scaling trends of self-verification and how to improve its capabilities with TTC.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '8. ', 'raw': '8. 
'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2501.14723'}, 'url': 'https://huggingface.co/papers/2501.14723', 'raw': 'https://huggingface.co/papers/2501.14723', 'label': 'CodeMonkeys: Scaling Test-Time Compute for Software Engineering (2501.14723)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explores how scaling serial compute (iterations) and parallel compute (trajectories), can improve accuracy in real-world software engineering issues.', 'raw': 'Explores how scaling serial compute (iterations) and parallel compute (trajectories), can improve accuracy in real-world software engineering issues.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also, explore our article about TTS for more -> ', 'raw': 'Also, explore our article about TTS for more -> '}, {'type': 'link', 'href': 'https://huggingface.co/blog/Kseniase/testtimecompute', 'raw': 'https://huggingface.co/blog/Kseniase/testtimecompute'}]","8 New Applications of Test-Time Scaling + +We've noticed a huge interest in test-time scaling (TTS), so we decided to explore this concept further. Test-time compute (TTC) refers to the amount of computational power used by an AI model when generating a response. Many researchers are now focused on scaling TTC, as it enables slow, deep ""thinking"" and step-by-step reasoning, which improves overall models' performance. + +Here are 8 fresh studies on test-time scaling: + +1. https://huggingface.co/papers/2502.05171 +Introduces an LM that scales TTC by reasoning in latent space instead of generating more tokens with no special training. Here, a recurrent block to processes information iteratively. + +2. https://huggingface.co/papers/2502.04728 +Shows how TTS is applied to enhance model's Planning Domain Definition Language (PDDL) reasoning capabilities, which can be used to generate a symbolic world model. + +3. https://huggingface.co/papers/2502.06703 +Analyzes optimal TTS strategies and shows how small models can outperform much larger ones. + +4. https://huggingface.co/papers/2502.04128 +Shows how TTS improves expressiveness, timbre consistency and accuracy in speech synthesis with Llasa framework. It also dives into benefits of scaling train-time compute. + +5. https://huggingface.co/papers/2502.07154 +Suggests a modified training loss for better reasoning of LLMs when scaling TTC. + +6. https://huggingface.co/papers/2502.05078 +Unifies the strengths of chain, tree, and graph paradigms into one framework that expands reasoning only on necessary subproblems. + +7. https://huggingface.co/papers/2502.01839 +Explores scaling trends of self-verification and how to improve its capabilities with TTC. + +8. https://huggingface.co/papers/2501.14723 +Explores how scaling serial compute (iterations) and parallel compute (trajectories), can improve accuracy in real-world software engineering issues. 
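To ground the parallel-compute strategies surveyed above (items 3, 7, and 8), here is a minimal sketch of one canonical test-time scaling recipe, best-of-N sampling with a verifier; `generate` and `score` are hypothetical stand-ins for a policy model and a reward/verification model, not APIs from any of the cited papers:

```python
from typing import Callable

def best_of_n(prompt: str,
              generate: Callable[[str], str],      # hypothetical: samples one candidate answer
              score: Callable[[str, str], float],  # hypothetical: verifier / reward model
              n: int = 16) -> str:
    """Scale test-time compute by sampling N candidates and keeping the best-scoring one."""
    candidates = [generate(prompt) for _ in range(n)]  # parallel trajectories in practice
    return max(candidates, key=lambda c: score(prompt, c))
```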
+ +Also, explore our article about TTS for more -> https://huggingface.co/blog/Kseniase/testtimecompute",[],[],"[{'reaction': '🔥', 'users': ['MrShubhamSinghal', 'John6666', 'andthattoo', 'santoshr93', 'Laikokwei', 'codelion', 'Kasnol', 'watchstep', 'Kseniase', 'Roman12322'], 'count': 10}, {'reaction': '🚀', 'users': ['neo-9981', 'codelion', 'watchstep', 'Kseniase'], 'count': 4}, {'reaction': '👍', 'users': ['gravity7', 'watchstep', 'Kseniase', 'dantezxcd'], 'count': 4}, {'reaction': '😎', 'users': ['watchstep', 'Kseniase'], 'count': 2}]",2025-02-16 12:08:07,2025-02-17 08:36:01.071,"[{'_id': '62f32eab52ad88c930bb3f3b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png', 'fullname': 'Asankhaya Sharma', 'name': 'codelion', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 122, 'isFollowing': False}]",/posts/Kseniase/134685305854108,3318,"{'language': 'en', 'probability': 0.8533170223236084}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,539440985640088,"[{'type': 'text', 'value': ""Gini's AI Spaces: Everything You Need for Visual Content Creation!"", 'raw': ""Gini's AI Spaces: Everything You Need for Visual Content Creation!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hello! ✨ Let me introduce Gini’s 5 AI Spaces that effortlessly generate various styles of visual content.', 'raw': 'Hello! ✨ Let me introduce Gini’s 5 AI Spaces that effortlessly generate various styles of visual content.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Each Space leverages Diffusers and Gradio, so you can create stunning images in just a few clicks!', 'raw': 'Each Space leverages Diffusers and Gradio, so you can create stunning images in just a few clicks!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1) Flowchart', 'raw': '1) Flowchart'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Features: Hand-drawn style flowcharts for workflows or business processes', 'raw': 'Features: Hand-drawn style flowcharts for workflows or business processes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use Cases: Software release pipelines, data pipelines, corporate workflows', 'raw': 'Use Cases: Software release pipelines, data pipelines, corporate workflows'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benefits: Clear stage-by-stage structure, simple icon usage', 'raw': 'Benefits: Clear stage-by-stage structure, simple icon usage'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/Flowchart'}, 'url': 'https://huggingface.co/spaces/ginigen/Flowchart', 'raw': 'https://huggingface.co/spaces/ginigen/Flowchart'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2) Infographic', 'raw': '2) Infographic'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Features: Visually appealing infographics that communicate data or statistics', 'raw': 'Features: Visually appealing infographics that communicate data or statistics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use Cases: Global energy charts, startup growth 
metrics, health tips and more', 'raw': 'Use Cases: Global energy charts, startup growth metrics, health tips and more'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benefits: Eye-catching icons and layouts, perfect for storytelling at a glance', 'raw': 'Benefits: Eye-catching icons and layouts, perfect for storytelling at a glance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/Infographic'}, 'url': 'https://huggingface.co/spaces/ginigen/Infographic', 'raw': 'https://huggingface.co/spaces/ginigen/Infographic'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3) Mockup', 'raw': '3) Mockup'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Features: Sketch-style wireframes or UX mockups for apps/websites', 'raw': 'Features: Sketch-style wireframes or UX mockups for apps/websites'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use Cases: Mobile login flows, dashboards, e-commerce site layouts', 'raw': 'Use Cases: Mobile login flows, dashboards, e-commerce site layouts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benefits: Rapid prototyping of early design ideas, perfect for storyboarding', 'raw': 'Benefits: Rapid prototyping of early design ideas, perfect for storyboarding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/Mockup'}, 'url': 'https://huggingface.co/spaces/ginigen/Mockup', 'raw': 'https://huggingface.co/spaces/ginigen/Mockup'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4) Diagram', 'raw': '4) Diagram'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Features: Educational diagrams (science, biology, geography, etc.)', 'raw': 'Features: Educational diagrams (science, biology, geography, etc.)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use Cases: Water cycle, photosynthesis, chemical reactions, human anatomy', 'raw': 'Use Cases: Water cycle, photosynthesis, chemical reactions, human anatomy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benefits: Vibrant, friendly illustrations, ideal for student-friendly materials', 'raw': 'Benefits: Vibrant, friendly illustrations, ideal for student-friendly materials'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/Diagram'}, 'url': 'https://huggingface.co/spaces/ginigen/Diagram', 'raw': 'https://huggingface.co/spaces/ginigen/Diagram'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5) Design', 'raw': '5) Design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Features: Product/industrial design concepts (coffee machines, smartphones, etc.)', 'raw': 'Features: Product/industrial design concepts (coffee machines, smartphones, etc.)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use Cases: Prototyping, concept car interiors, high-tech product sketches', 'raw': 'Use Cases: Prototyping, concept car interiors, high-tech product sketches'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benefits: From 3D render-like visuals to simple sketches, unleash your 
creativity!', 'raw': 'Benefits: From 3D render-like visuals to simple sketches, unleash your creativity!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/Design'}, 'url': 'https://huggingface.co/spaces/ginigen/Design', 'raw': 'https://huggingface.co/spaces/ginigen/Design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Click any link above and let AI spark your imagination. Enjoy a fun and productive creative process! 🚀✨', 'raw': 'Click any link above and let AI spark your imagination. Enjoy a fun and productive creative process! 🚀✨'}]","Gini's AI Spaces: Everything You Need for Visual Content Creation! + +Hello! ✨ Let me introduce Gini’s 5 AI Spaces that effortlessly generate various styles of visual content. + +Each Space leverages Diffusers and Gradio, so you can create stunning images in just a few clicks! + +1) Flowchart +Features: Hand-drawn style flowcharts for workflows or business processes +Use Cases: Software release pipelines, data pipelines, corporate workflows +Benefits: Clear stage-by-stage structure, simple icon usage + +https://huggingface.co/spaces/ginigen/Flowchart + +2) Infographic +Features: Visually appealing infographics that communicate data or statistics +Use Cases: Global energy charts, startup growth metrics, health tips and more +Benefits: Eye-catching icons and layouts, perfect for storytelling at a glance + +https://huggingface.co/spaces/ginigen/Infographic + +3) Mockup +Features: Sketch-style wireframes or UX mockups for apps/websites +Use Cases: Mobile login flows, dashboards, e-commerce site layouts +Benefits: Rapid prototyping of early design ideas, perfect for storyboarding + +https://huggingface.co/spaces/ginigen/Mockup + +4) Diagram +Features: Educational diagrams (science, biology, geography, etc.) +Use Cases: Water cycle, photosynthesis, chemical reactions, human anatomy +Benefits: Vibrant, friendly illustrations, ideal for student-friendly materials + +https://huggingface.co/spaces/ginigen/Diagram + +5) Design +Features: Product/industrial design concepts (coffee machines, smartphones, etc.) +Use Cases: Prototyping, concept car interiors, high-tech product sketches +Benefits: From 3D render-like visuals to simple sketches, unleash your creativity! + +https://huggingface.co/spaces/ginigen/Design + +Click any link above and let AI spark your imagination. Enjoy a fun and productive creative process! 
🚀✨","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/FdcGeJ6AjqF-NtxYJy3U4.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/RucWHFg3ViCKdPXDW59BN.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/KrvO4cAXp0zdVHHpsCVSf.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/hPInBh_z3OyNdQ1o7EIRe.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/qHnswlzys0bJeL2CSJcTl.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/Nlczewvz9Y3ONzTqYneQe.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/PuaSAILTlIEyccILF_Hxj.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/8QP62bV_lKQpxjiaaz-jn.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/071y057R9WGLe31gyv5DN.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/wa-Bosboc_t0wfxDT9EWg.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/gql-o8vdLBrhrx3sydUlJ.webp'}]",[],"[{'reaction': '🔥', 'users': ['ginipick', 'cowardpte', 'michaelangel', 'algebradavid', 'ineverriver', 'seawolf2357', 'openfree', 'aiqcamp', 'fantos', 'fantaxy', 'aiqtech', 'cutechicken', 'gunship999', 'immunobiotech', 'kolaslab', 'arabriabricks', 'calacuria2003', 'damerajee', 'John6666', 'joseph-bou', 'LiraMirui', 'zaravelisa', 'sylvain471', 'quangto', 'Isma-Mcclane'], 'count': 25}, {'reaction': '🚀', 'users': ['cowardpte', 'michaelangel', 'algebradavid', 'ineverriver', 'seawolf2357', 'openfree', 'ginipick', 'aiqcamp', 'fantos', 'fantaxy', 'cutechicken', 'gunship999', 'kolaslab', 'arabriabricks', 'calacuria2003', 'damerajee', 'joseph-bou', 'quangto'], 'count': 18}, {'reaction': '👀', 'users': ['cowardpte', 'michaelangel', 'algebradavid', 'seawolf2357', 'ginipick', 'aiqcamp', 'fantos', 'fantaxy', 'cutechicken', 'gunship999', 'kolaslab', 'openfree', 'arabriabricks', 'calacuria2003', 'damerajee'], 'count': 15}, {'reaction': '❤️', 'users': ['cowardpte', 'michaelangel', 'seawolf2357', 'ginipick', 'aiqcamp', 'cutechicken', 'gunship999', 'kolaslab', 'openfree', 'arabriabricks', 'calacuria2003', 'Akseltinfat', 'ENGDele', 'dantezxcd'], 'count': 14}, {'reaction': '🤗', 'users': ['cowardpte', 'seawolf2357', 'ginipick', 'kolaslab', 'openfree', 'arabriabricks', 'calacuria2003', 'damerajee', 'Kaif77'], 'count': 9}, {'reaction': '😎', 'users': ['cowardpte', 'seawolf2357', 'kolaslab', 'openfree', 'arabriabricks', 'calacuria2003', 'damerajee'], 'count': 7}, {'reaction': '👍', 'users': ['cowardpte', 'kolaslab', 'openfree', 'arabriabricks', 'calacuria2003', 'yasserrmd', 'AnneLindberg94'], 'count': 7}, {'reaction': '🧠', 'users': ['cowardpte', 'seawolf2357', 'kolaslab', 'openfree', 'arabriabricks', 'calacuria2003'], 'count': 6}, {'reaction': '➕', 'users': ['cowardpte', 'seawolf2357', 'kolaslab', 'openfree', 'arabriabricks', 'calacuria2003'], 'count': 6}, {'reaction': '🤯', 'users': ['cowardpte', 'kolaslab', 'openfree', 'arabriabricks'], 'count': 4}, {'reaction': '😔', 'users': ['cowardpte', 'kolaslab', 'openfree', 'arabriabricks'], 'count': 4}, 
{'reaction': '🤝', 'users': ['cowardpte', 'kolaslab', 'openfree', 'arabriabricks'], 'count': 4}]",2025-02-16 11:22:37,2025-02-16 11:22:37.401,[],/posts/ginipick/539440985640088,5958,"{'language': 'en', 'probability': 0.71873539686203}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,506726541208102,"[{'type': 'text', 'value': 'Dataset descriptions for trending Hugging Face datasets? Powered by a Smol model ', 'raw': 'Dataset descriptions for trending Hugging Face datasets? Powered by a Smol model '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'davanstrien/Smol-Hub-tldr'}, 'url': 'https://huggingface.co/davanstrien/Smol-Hub-tldr', 'raw': 'https://huggingface.co/davanstrien/Smol-Hub-tldr'}]",Dataset descriptions for trending Hugging Face datasets? Powered by a Smol model https://huggingface.co/davanstrien/Smol-Hub-tldr,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60107b385ac3e86b3ea4fc34/DkGVOyCGgFZCAlXJpaGtA.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'prithivMLmods', 'ant-des', 'dantezxcd'], 'count': 4}]",2025-02-16 10:41:34,2025-02-16 10:41:34.810,[],/posts/davanstrien/506726541208102,2285,"{'language': 'en', 'probability': 0.6523264646530151}",0 +/avatars/937a64aea8fde2f41a065f052b39f409.svg,48.0,alkinun,AtAndDev,666626149998359,"[{'type': 'mention', 'user': 'nroggendorff', 'raw': '@nroggendorff'}, {'type': 'text', 'value': ' is that you sama?', 'raw': ' is that you sama?'}]",@nroggendorff is that you sama?,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/630f3e4002ce39336c411048/T6PQOsB0l3WwkBJ-hWPkB.png'}]","[{'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227}]","[{'reaction': '😎', 'users': ['John6666', 'clem', 'dantezxcd'], 'count': 3}, {'reaction': '🔥', 'users': ['John6666'], 'count': 1}, {'reaction': '🤝', 'users': ['Davidsv'], 'count': 1}]",2025-02-15 18:55:33,2025-02-16 06:25:49.893,"[{'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}]",/posts/AtAndDev/666626149998359,2483,"{'language': 'en', 'probability': 0.8954522609710693}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,657848271944936,"[{'type': 'text', 'value': '📢 For those who consider a quick and inplace annotation of entities in JSON / CSV tabular data, I got a good news. So far releasing the latest version of the bulk-ner which does these things for you:', 'raw': '📢 For those who consider a quick and inplace annotation of entities in JSON / CSV tabular data, I got a good news. 
So far releasing the latest version of the bulk-ner which does these things for you:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟 ', 'raw': '🌟 '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/bulk-ner/releases/tag/0.25.2', 'raw': 'https://github.com/nicolay-r/bulk-ner/releases/tag/0.25.2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'bulk-ner is a no-strings wrapper over NER services using popular frameworks like DeepPavlov, Spacy, Flair.', 'raw': 'bulk-ner is a no-strings wrapper over NER services using popular frameworks like DeepPavlov, Spacy, Flair.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""What's new? The latest 0.25.2 version has the following key features:"", 'raw': ""What's new? The latest 0.25.2 version has the following key features:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔧 Fixed: 🐛 the output ignores other input content in input #31', 'raw': '🔧 Fixed: 🐛 the output ignores other input content in input #31'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥 Schemas support: you can annotate various columns by combining them as you wish and map them onto the other output columns (see 📸 below) #28', 'raw': '🔥 Schemas support: you can annotate various columns by combining them as you wish and map them onto the other output columns (see 📸 below) #28'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Below is a screenshot showing how you can quickly get started using it with Spacy models.', 'raw': 'Below is a screenshot showing how you can quickly get started using it with Spacy models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌌 List of other providers @ nlp-thirdgate:', 'raw': '🌌 List of other providers @ nlp-thirdgate:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/nicolay-r/nlp-thirdgate/tree/master/ner', 'raw': 'https://github.com/nicolay-r/nlp-thirdgate/tree/master/ner'}]","📢 For those who consider a quick and inplace annotation of entities in JSON / CSV tabular data, I got a good news. So far releasing the latest version of the bulk-ner which does these things for you: +🌟 https://github.com/nicolay-r/bulk-ner/releases/tag/0.25.2 + +bulk-ner is a no-strings wrapper over NER services using popular frameworks like DeepPavlov, Spacy, Flair. + +What's new? The latest 0.25.2 version has the following key features: +🔧 Fixed: 🐛 the output ignores other input content in input #31 +🔥 Schemas support: you can annotate various columns by combining them as you wish and map them onto the other output columns (see 📸 below) #28 + +Below is a screenshot showing how you can quickly get started using it with Spacy models.
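For readers without the screenshot, a minimal sketch of the underlying technique (in-place NER annotation of a CSV column); this calls spaCy directly rather than using bulk-ner's own CLI, and the file and column names are illustrative:

```python
import pandas as pd
import spacy

# Load a pretrained spaCy pipeline (one of the frameworks bulk-ner wraps).
nlp = spacy.load("en_core_web_sm")

df = pd.read_csv("data.csv")  # hypothetical input with a "text" column

def annotate(text: str) -> str:
    """Wrap each recognized entity in place as [TYPE]span[/TYPE]."""
    doc = nlp(text)
    out, last = [], 0
    for ent in doc.ents:
        out.append(text[last:ent.start_char])
        out.append(f"[{ent.label_}]{ent.text}[/{ent.label_}]")
        last = ent.end_char
    out.append(text[last:])
    return "".join(out)

df["text_ner"] = df["text"].astype(str).map(annotate)
df.to_csv("data_annotated.csv", index=False)
```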
+ +🌌 List of other providers @ nlp-thirdgate: +https://github.com/nicolay-r/nlp-thirdgate/tree/master/ner","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/RRLTouF6LOE69_3hSXuFA.png'}]",[],"[{'reaction': '🤝', 'users': ['prithivMLmods', 'John6666', 'Fishtiks', 'Davidsv', 'dantezxcd'], 'count': 5}]",2025-02-15 18:49:07,2025-02-15 18:49:07.784,[],/posts/nicolay-r/657848271944936,2420,"{'language': 'en', 'probability': 0.8763673901557922}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,464265972064174,"[{'type': 'text', 'value': 'hello, dev mode explorers!', 'raw': 'hello, dev mode explorers!'}]","hello, dev mode explorers!",[],[],"[{'reaction': '🤗', 'users': ['nroggendorff', 'pepper13', 'plozia', 'pabloce', 'AtAndDev', 'John6666', 'Davidsv', 'NeoPy', 'dantezxcd'], 'count': 9}]",2025-02-15 16:47:36,2025-02-17 13:45:35.232,"[{'_id': '66fe8fb27d722f0879b4631f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg', 'fullname': 'Sk md saad amin', 'name': 'Reality123b', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}, {'_id': '64585831aa36a63c42a084cc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/fUdf7f8t7aA5w8Ytdy7Qv.png', 'fullname': 'tao,yi', 'name': 'etaooo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/nroggendorff/464265972064174,2844,"{'language': 'en', 'probability': 0.5269438624382019}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/665085fcf074e4fd74042982/d0jxN8BCZz1dzcu3dyM2e.png,9.0,Benhao Tang,benhaotang,322538825901593,"[{'type': 'text', 'value': 'Try out my updated implementation of forked OpenDeepResearcher(link below) as an OpenAI compatible endpoint, but with full control, can be deployed completely free with Gemini api or completely locally with ollama, or pay-as-you-go in BYOK format, the AI agents will think dynamically based on the difficulties of given research, compatible with any OpenAI compatible configurable clients(Msty, Chatbox, even vscode AI Toolkit playground). ', 'raw': 'Try out my updated implementation of forked OpenDeepResearcher(link below) as an OpenAI compatible endpoint, but with full control, can be deployed completely free with Gemini api or completely locally with ollama, or pay-as-you-go in BYOK format, the AI agents will think dynamically based on the difficulties of given research, compatible with any OpenAI compatible configurable clients(Msty, Chatbox, even vscode AI Toolkit playground). 
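Since the fork above exposes an OpenAI-compatible endpoint, any standard client should be able to drive it; a minimal sketch using the official `openai` Python package, where the base URL, port, and model id are placeholders and not taken from the repo:

```python
from openai import OpenAI

# Point the standard client at a locally hosted, OpenAI-compatible endpoint.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="not-needed-locally")

response = client.chat.completions.create(
    model="deep-researcher",  # placeholder model id
    messages=[{"role": "user", "content": "Survey recent approaches to lattice QCD renormalization."}],
)
print(response.choices[0].message.content)
```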
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you don't want to pay OpenAI $200 to use or want to take control of your deep research, check out here: "", 'raw': ""If you don't want to pay OpenAI $200 to use or want to take control of your deep research, check out here: ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://github.com/benhaotang/OpenDeepResearcher-via-searxng', 'raw': 'https://github.com/benhaotang/OpenDeepResearcher-via-searxng'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '**Personal take**', 'raw': '**Personal take**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Based on my testing against Perplexity's and Gemini's implementation with some Physics domain questions, mine is comparable and very competent at finding even the most rare articles or methods."", 'raw': ""Based on my testing against Perplexity's and Gemini's implementation with some Physics domain questions, mine is comparable and very competent at finding even the most rare articles or methods.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also a funny benchmark of mine to test all these searching models, is to trouble shot a WSL2 hanging issue I experienced last year, with prompt:', 'raw': 'Also a funny benchmark of mine to test all these searching models, is to trouble shot a WSL2 hanging issue I experienced last year, with prompt:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> wsl2 in windows hangs in background with high vmmem cpu usage once in a while, especially after hibernation, no error logs captured in linux, also unable to shutdown in powershell, provide solutions', 'raw': '> wsl2 in windows hangs in background with high vmmem cpu usage once in a while, especially after hibernation, no error logs captured in linux, also unable to shutdown in powershell, provide solutions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""the final solution that took me a day last year to find is to patch the kernel with some steps documented in carlfriedrich's repo and wait Microsoft to solve it(it is buried deep in wsl issues). Out of the three, only my Deep Research agent has found this solution, Perplexity and Gemini just focus on other force restart or memory management methods. I am very impressed with how it has this kind of obscure and scarce trouble shooting ability."", 'raw': ""the final solution that took me a day last year to find is to patch the kernel with some steps documented in carlfriedrich's repo and wait Microsoft to solve it(it is buried deep in wsl issues). Out of the three, only my Deep Research agent has found this solution, Perplexity and Gemini just focus on other force restart or memory management methods. 
I am very impressed with how it has this kind of obscure and scarce trouble shooting ability.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '**Limitations**', 'raw': '**Limitations**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Some caveats to be done later:', 'raw': 'Some caveats to be done later:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multi-turn conversation is not yet supported, so no follow-up questions', 'raw': '- Multi-turn conversation is not yet supported, so no follow-up questions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- System message is only extra writing instructions, don't affect on search"", 'raw': ""- System message is only extra writing instructions, don't affect on search""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Small local model may have trouble citing source reliably, I am working on a fix to fact check all citation claims', 'raw': '- Small local model may have trouble citing source reliably, I am working on a fix to fact check all citation claims'}]","Try out my updated implementation of forked OpenDeepResearcher(link below) as an OpenAI compatible endpoint, but with full control, can be deployed completely free with Gemini api or completely locally with ollama, or pay-as-you-go in BYOK format, the AI agents will think dynamically based on the difficulties of given research, compatible with any OpenAI compatible configurable clients(Msty, Chatbox, even vscode AI Toolkit playground). + +If you don't want to pay OpenAI $200 to use or want to take control of your deep research, check out here: +👉 https://github.com/benhaotang/OpenDeepResearcher-via-searxng + +**Personal take** + +Based on my testing against Perplexity's and Gemini's implementation with some Physics domain questions, mine is comparable and very competent at finding even the most rare articles or methods. + +Also a funny benchmark of mine to test all these searching models, is to trouble shot a WSL2 hanging issue I experienced last year, with prompt: + +> wsl2 in windows hangs in background with high vmmem cpu usage once in a while, especially after hibernation, no error logs captured in linux, also unable to shutdown in powershell, provide solutions + +the final solution that took me a day last year to find is to patch the kernel with some steps documented in carlfriedrich's repo and wait Microsoft to solve it(it is buried deep in wsl issues). Out of the three, only my Deep Research agent has found this solution, Perplexity and Gemini just focus on other force restart or memory management methods. I am very impressed with how it has this kind of obscure and scarce trouble shooting ability. 
+ +**Limitations** + +Some caveats to be done later: +- Multi-turn conversation is not yet supported, so no follow-up questions +- System message is only extra writing instructions, don't affect on search +- Small local model may have trouble citing source reliably, I am working on a fix to fact check all citation claims","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/665085fcf074e4fd74042982/sRXTCP-WDrTOrZvCDd25E.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/665085fcf074e4fd74042982/45nXwoU6wIAnQtnctSXtC.png'}]",[],"[{'reaction': '🔥', 'users': ['DeFactOfficial', 'jojohannsen', 'Fishtiks', 'win10', 'Jukka-Sun', 'kepler-78b', 'dantezxcd'], 'count': 7}, {'reaction': '🚀', 'users': ['nicolay-r', 'John6666', 'Fishtiks'], 'count': 3}, {'reaction': '👍', 'users': ['kukurick'], 'count': 1}]",2025-02-15 14:20:27,2025-02-19 22:54:02.406,"[{'_id': '665085fcf074e4fd74042982', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/665085fcf074e4fd74042982/d0jxN8BCZz1dzcu3dyM2e.png', 'fullname': 'Benhao Tang', 'name': 'benhaotang', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9, 'isFollowing': False}]",/posts/benhaotang/322538825901593,2462,"{'language': 'en', 'probability': 0.9208125472068787}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1658676776546-5f0de36419cb630495b8153c.jpeg,19.0,Tony Zhao,tianchez,384417618281589,"[{'type': 'text', 'value': 'Introducing VLM-R1!', 'raw': 'Introducing VLM-R1!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GRPO has helped DeepSeek R1 to learn reasoning. Can it also help VLMs perform stronger for general computer vision tasks?', 'raw': 'GRPO has helped DeepSeek R1 to learn reasoning. Can it also help VLMs perform stronger for general computer vision tasks?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The answer is YES and it generalizes better than SFT. We trained Qwen 2.5 VL 3B on RefCOCO (a visual grounding task) and eval on RefCOCO Val and RefGTA (an OOD task). ', 'raw': 'The answer is YES and it generalizes better than SFT. We trained Qwen 2.5 VL 3B on RefCOCO (a visual grounding task) and eval on RefCOCO Val and RefGTA (an OOD task). '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/om-ai-lab/VLM-R1', 'raw': 'https://github.com/om-ai-lab/VLM-R1'}]","Introducing VLM-R1! + +GRPO has helped DeepSeek R1 to learn reasoning. Can it also help VLMs perform stronger for general computer vision tasks? + +The answer is YES and it generalizes better than SFT. We trained Qwen 2.5 VL 3B on RefCOCO (a visual grounding task) and eval on RefCOCO Val and RefGTA (an OOD task). 
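For readers new to GRPO, the training signal behind the result above works without a learned value baseline: each sampled response is advantaged against the mean reward of its own group. A minimal sketch of that advantage computation (in visual grounding, the rewards would come from something like an IoU check against the ground-truth box; this is the general recipe, not VLM-R1's exact code):

```python
import torch

def grpo_advantages(rewards: torch.Tensor, eps: float = 1e-4) -> torch.Tensor:
    """Group-relative advantages: normalize each group's rewards by its own
    mean and std, so no separate value network is needed.
    rewards: (num_prompts, group_size), one row of sampled responses per prompt."""
    mean = rewards.mean(dim=1, keepdim=True)
    std = rewards.std(dim=1, keepdim=True)
    return (rewards - mean) / (std + eps)

# e.g. 2 prompts, 4 sampled responses each
adv = grpo_advantages(torch.tensor([[1.0, 0.0, 0.5, 1.0], [0.0, 0.0, 1.0, 0.5]]))
```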
+ +https://github.com/om-ai-lab/VLM-R1","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f0de36419cb630495b8153c/vZMZ2p5YEQoNahYLMKOuz.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['tianchez', 'John6666', 'nicolay-r', 'danielfl', 'byoussef', 'seyf1elislam', 'KillerShoaib', 'PopCat19', 'yongheng007', 'qq-hzlh', 'kyusonglee', 'P3ngLiu', 'Tonic', 'wuziheng', 'jan-hq', 'mbiswas', 'dantezxcd', 'muhtasham'], 'count': 18}, {'reaction': '👍', 'users': ['BuiDoan', 'PopCat19', 'qq-hzlh', 'panregedit', 'P3ngLiu', 'Jukka-Sun', 'Tonic', 'Chuanming', 'tianchez'], 'count': 9}, {'reaction': '🔥', 'users': ['tianchez', 'Tonic', 'tunglinwu'], 'count': 3}, {'reaction': '❤️', 'users': ['tianchez', 'Tonic'], 'count': 2}]",2025-02-15 12:35:12,2025-02-27 09:49:39.540,"[{'_id': '62d7b2339b629105a5d6888a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62d7b2339b629105a5d6888a/pYi994fqzXctgoed79oUo.png', 'fullname': 'Alan Dao', 'name': 'alandao', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 17, 'isFollowing': False}, {'_id': '5f0de36419cb630495b8153c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1658676776546-5f0de36419cb630495b8153c.jpeg', 'fullname': 'Tony Zhao', 'name': 'tianchez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 19, 'isFollowing': False}, {'_id': '63a45239412fd71fb7f2dc48', 'avatarUrl': '/avatars/40cb9c7ce0b150d121dac3fc9dd09e17.svg', 'fullname': 'Mainak Biswas', 'name': 'mbiswas', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/tianchez/384417618281589,4482,"{'language': 'en', 'probability': 0.9045408964157104}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg,77.0,Kenneth Hamilton,ZennyKenny,584467772865203,"[{'type': 'text', 'value': ""I've completed the first unit of the just-launched Hugging Face Agents Course. I would highly recommend it, even for experienced builders, because it is a great walkthrough of the smolagents library and toolkit."", 'raw': ""I've completed the first unit of the just-launched Hugging Face Agents Course. I would highly recommend it, even for experienced builders, because it is a great walkthrough of the smolagents library and toolkit.""}]","I've completed the first unit of the just-launched Hugging Face Agents Course. 
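For anyone starting the course cold, a minimal smolagents hello-world in the spirit of what the first unit walks through; this is a sketch assuming the `CodeAgent`/`HfApiModel` entry points from the library at the time the course launched, so check the course itself for the current API:

```python
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel

# A CodeAgent writes and executes Python to solve the task, calling tools as needed.
agent = CodeAgent(tools=[DuckDuckGoSearchTool()], model=HfApiModel())
agent.run("How many seconds would it take a leopard at full speed to cross Pont des Arts?")
```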
I would highly recommend it, even for experienced builders, because it is a great walkthrough of the smolagents library and toolkit.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/656e3808d4de03a07d116850/1x8XpwfDCsoeOpeRNv1j4.webp'}]",[],"[{'reaction': '🔥', 'users': ['FerrariFer', 'J33L', 'HPositive', 'John6666', 'davidberenstein1957', 'DamarJati', 'ysharma', 'rchfullstack', 'erinmikail', 'nes74', 'CharisTheAI', 'lc2004', 'vickzk', 'ZyAi', 'natalika'], 'count': 15}, {'reaction': '🤗', 'users': ['John6666', 'davidberenstein1957', 'erinmikail', 'oieieio', 'natalika'], 'count': 5}, {'reaction': '😎', 'users': ['John6666', 'davidberenstein1957', 'erinmikail', 'dantezxcd'], 'count': 4}, {'reaction': '❤️', 'users': ['davidberenstein1957', 'erinmikail', 'natalika'], 'count': 3}, {'reaction': '👍', 'users': ['willhsu', 'natalika'], 'count': 2}]",2025-02-11 21:53:11,2025-02-11 21:53:11.064,[],/posts/ZennyKenny/584467772865203,3477,"{'language': 'en', 'probability': 0.9610306024551392}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/655a505750b9a14799164a3a/TPbltGUpcxJW_LNLWGSpu.jpeg,18.0,Eliseu Silva,elismasilva,251775641926329,"[{'type': 'text', 'value': 'Mixture-of-Diffusers pipeline tiling for SDXL', 'raw': 'Mixture-of-Diffusers pipeline tiling for SDXL'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This strives to provide a better tool for image composition by using several diffusion processes in parallel, each configured with a specific prompt and settings, and focused on a particular region of the image. The mixture of diffusion processes is done in a way that harmonizes the generation process, preventing ""seam"" effects in the generated image. Using several diffusion processes in parallel has also practical advantages when generating very large images, as the GPU memory requirements are similar to that of generating an image of the size of a single tile. ', 'raw': 'This strives to provide a better tool for image composition by using several diffusion processes in parallel, each configured with a specific prompt and settings, and focused on a particular region of the image. The mixture of diffusion processes is done in a way that harmonizes the generation process, preventing ""seam"" effects in the generated image. Using several diffusion processes in parallel has also practical advantages when generating very large images, as the GPU memory requirements are similar to that of generating an image of the size of a single tile. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'elismasilva/mixture-of-diffusers-sdxl-tiling'}, 'url': 'https://huggingface.co/spaces/elismasilva/mixture-of-diffusers-sdxl-tiling', 'raw': 'https://huggingface.co/spaces/elismasilva/mixture-of-diffusers-sdxl-tiling'}, {'type': 'new_line', 'raw': '\n'}]","Mixture-of-Diffusers pipeline tiling for SDXL + +This strives to provide a better tool for image composition by using several diffusion processes in parallel, each configured with a specific prompt and settings, and focused on a particular region of the image. The mixture of diffusion processes is done in a way that harmonizes the generation process, preventing ""seam"" effects in the generated image. 
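The seam-free harmonization described above comes down to a weighted average of overlapping tile outputs. A minimal sketch of that blending step with smooth per-tile weight masks (NumPy; the Hann-window weighting and tile placement are illustrative, not the pipeline's actual internals):

```python
import numpy as np

def tile_weight(h: int, w: int) -> np.ndarray:
    """Smooth mask that peaks at the tile center and decays toward the edges,
    so overlapping tiles blend without visible seams."""
    return np.hanning(h)[:, None] * np.hanning(w)[None, :] + 1e-8

def merge_tiles(canvas_hw: tuple, tiles: list, positions: list) -> np.ndarray:
    """Weighted-average per-pixel merge of overlapping tiles.
    tiles: list of (h, w, c) arrays; positions: top-left (y, x) of each tile."""
    acc = np.zeros(canvas_hw + (tiles[0].shape[-1],))
    wsum = np.zeros(canvas_hw + (1,))
    for tile, (y, x) in zip(tiles, positions):
        h, w = tile.shape[:2]
        mask = tile_weight(h, w)[..., None]
        acc[y:y + h, x:x + w] += tile * mask
        wsum[y:y + h, x:x + w] += mask
    return acc / wsum
```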
Using several diffusion processes in parallel has also practical advantages when generating very large images, as the GPU memory requirements are similar to that of generating an image of the size of a single tile. + +https://huggingface.co/spaces/elismasilva/mixture-of-diffusers-sdxl-tiling +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/655a505750b9a14799164a3a/6HgpVMJVzDXEFJLN7eyBF.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'Florianpelier31', 'lxrenxl', 'night-codes', 'djuna', 'J33L', 'RochyRoch', 'dilanka12', 'jdagh', 'elismasilva'], 'count': 10}, {'reaction': '👀', 'users': ['nofl', 'dantezxcd'], 'count': 2}]",2025-02-11 13:25:36,2025-02-11 13:27:19.381,[],/posts/elismasilva/251775641926329,3665,"{'language': 'en', 'probability': 0.8967143297195435}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,776720011919298,"[{'type': 'text', 'value': 'Time Stream ⏳🚀', 'raw': 'Time Stream ⏳🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Time Stream is a groundbreaking AI tool that transforms your text into a mesmerizing video journey from the past to the future. With this innovative technology, your ideas evolve over time, visualized through a dynamic image strip and a fluid video narrative. Imagine typing a simple prompt and watching as your words transform into vivid scenes that capture every moment of change—like a time machine for creativity! 🎥✨', 'raw': 'Time Stream is a groundbreaking AI tool that transforms your text into a mesmerizing video journey from the past to the future. With this innovative technology, your ideas evolve over time, visualized through a dynamic image strip and a fluid video narrative. Imagine typing a simple prompt and watching as your words transform into vivid scenes that capture every moment of change—like a time machine for creativity! 🎥✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Features: • Text-to-Video Transformation: Enter any text, and Time Stream converts it into a compelling video that travels through time, turning your ideas into a visual story. 📽️', 'raw': 'Key Features: • Text-to-Video Transformation: Enter any text, and Time Stream converts it into a compelling video that travels through time, turning your ideas into a visual story. 📽️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Dynamic Image Strip: Alongside the video, a vibrant image strip is created, showcasing each stage of the transformation so you can see every detail of the evolution. 📸', 'raw': '• Dynamic Image Strip: Alongside the video, a vibrant image strip is created, showcasing each stage of the transformation so you can see every detail of the evolution. 📸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Customizable Settings: Adjust parameters such as strength, guidance scale, and more to fine-tune your video’s appearance and ensure it perfectly matches your creative vision. ⚙️', 'raw': '• Customizable Settings: Adjust parameters such as strength, guidance scale, and more to fine-tune your video’s appearance and ensure it perfectly matches your creative vision. ⚙️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• User-Friendly Interface: With a modern and sleek design, Time Stream is incredibly easy to use. 
Its intuitive layout lets you focus on your creativity without any technical hurdles. 🖥️🌟', 'raw': '• User-Friendly Interface: With a modern and sleek design, Time Stream is incredibly easy to use. Its intuitive layout lets you focus on your creativity without any technical hurdles. 🖥️🌟'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Time Stream is perfect for artists, storytellers, designers, and anyone who loves to see their ideas come to life in new and exciting ways. Whether you’re reflecting on the past, celebrating the present, or dreaming about the future, Time Stream turns your narrative into a vivid, ever-changing masterpiece. Dive in and let your imagination soar as you journey through time, one image at a time! 🚀🔥', 'raw': 'Time Stream is perfect for artists, storytellers, designers, and anyone who loves to see their ideas come to life in new and exciting ways. Whether you’re reflecting on the past, celebrating the present, or dreaming about the future, Time Stream turns your narrative into a vivid, ever-changing masterpiece. Dive in and let your imagination soar as you journey through time, one image at a time! 🚀🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginipick/Time-Stream'}, 'url': 'https://huggingface.co/spaces/ginipick/Time-Stream', 'raw': 'https://huggingface.co/spaces/ginipick/Time-Stream'}]","Time Stream ⏳🚀 + +Time Stream is a groundbreaking AI tool that transforms your text into a mesmerizing video journey from the past to the future. With this innovative technology, your ideas evolve over time, visualized through a dynamic image strip and a fluid video narrative. Imagine typing a simple prompt and watching as your words transform into vivid scenes that capture every moment of change—like a time machine for creativity! 🎥✨ + +Key Features: • Text-to-Video Transformation: Enter any text, and Time Stream converts it into a compelling video that travels through time, turning your ideas into a visual story. 📽️ +• Dynamic Image Strip: Alongside the video, a vibrant image strip is created, showcasing each stage of the transformation so you can see every detail of the evolution. 📸 +• Customizable Settings: Adjust parameters such as strength, guidance scale, and more to fine-tune your video’s appearance and ensure it perfectly matches your creative vision. ⚙️ +• User-Friendly Interface: With a modern and sleek design, Time Stream is incredibly easy to use. Its intuitive layout lets you focus on your creativity without any technical hurdles. 🖥️🌟 + +Time Stream is perfect for artists, storytellers, designers, and anyone who loves to see their ideas come to life in new and exciting ways. Whether you’re reflecting on the past, celebrating the present, or dreaming about the future, Time Stream turns your narrative into a vivid, ever-changing masterpiece. Dive in and let your imagination soar as you journey through time, one image at a time! 
🚀🔥 + +https://huggingface.co/spaces/ginipick/Time-Stream","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/eOGaHk4htr5bPvCjA7Qlv.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/rznCOex9-qK7v3HtSYzlJ.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/3jU_JV9QMO-4qy31mffqm.mp4'}]",[],"[{'reaction': '🔥', 'users': ['ginipick', 'algebradavid', 'michaelangel', 'cowardpte', 'ineverriver', 'seawolf2357', 'aiqcamp', 'fantos', 'openfree', 'fantaxy', 'immunobiotech', 'aiqtech', 'kolaslab', 'cutechicken', 'gunship999', 'Williamson09', 'John6666', 'patrawtf'], 'count': 18}, {'reaction': '🚀', 'users': ['algebradavid', 'michaelangel', 'cowardpte', 'ineverriver', 'seawolf2357', 'fantos', 'ginipick', 'openfree', 'fantaxy', 'immunobiotech', 'aiqtech', 'cutechicken', 'gunship999', 'nicolay-r', 'Williamson09', 'dantezxcd'], 'count': 16}, {'reaction': '👀', 'users': ['algebradavid', 'michaelangel', 'cowardpte', 'seawolf2357', 'fantos', 'ginipick', 'fantaxy', 'aiqtech', 'cutechicken', 'openfree', 'Williamson09', 'Cryptoweak'], 'count': 12}, {'reaction': '❤️', 'users': ['algebradavid', 'cowardpte', 'fantos', 'ginipick', 'openfree', 'MomoRelmo', 'ashwinnaidu1991'], 'count': 7}, {'reaction': '➕', 'users': ['cowardpte', 'fantos', 'openfree', 'AuFt'], 'count': 4}, {'reaction': '🤗', 'users': ['algebradavid', 'ginipick', 'openfree'], 'count': 3}, {'reaction': '🧠', 'users': ['cowardpte', 'openfree'], 'count': 2}, {'reaction': '😎', 'users': ['cowardpte', 'openfree'], 'count': 2}, {'reaction': '😔', 'users': ['cowardpte', 'openfree'], 'count': 2}, {'reaction': '👍', 'users': ['cowardpte', 'openfree'], 'count': 2}, {'reaction': '🤯', 'users': ['cowardpte'], 'count': 1}, {'reaction': '🤝', 'users': ['cowardpte'], 'count': 1}]",2025-02-11 09:13:21,2025-02-12 11:16:31.788,[],/posts/ginipick/776720011919298,5528,"{'language': 'en', 'probability': 0.8764618039131165}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png,3314.0,ben burtenshaw,burtenshaw,457613029588941,"[{'type': 'text', 'value': 'The Hugging Face agents course is finally out! ', 'raw': 'The Hugging Face agents course is finally out! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'agents-course'}, 'url': 'https://huggingface.co/agents-course', 'raw': 'https://huggingface.co/agents-course', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60cae820b1c79a3e4b436664/gSOeYxuhnE0U0HFeh__wA.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This first unit of the course sets you up with all the fundamentals to become a pro in agents.', 'raw': 'This first unit of the course sets you up with all the fundamentals to become a pro in agents.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- What's an AI Agent?"", 'raw': ""- What's an AI Agent?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What are LLMs?', 'raw': '- What are LLMs?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Messages and Special Tokens', 'raw': '- Messages and Special Tokens'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Understanding AI Agents through the Thought-Action-Observation Cycle', 'raw': '- Understanding AI Agents through the Thought-Action-Observation Cycle'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Thought, Internal Reasoning and the Re-Act Approach', 'raw': '- Thought, Internal Reasoning and the Re-Act Approach'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Actions, Enabling the Agent to Engage with Its Environment', 'raw': '- Actions, Enabling the Agent to Engage with Its Environment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Observe, Integrating Feedback to Reflect and Adapt', 'raw': '- Observe, Integrating Feedback to Reflect and Adapt'}]","The Hugging Face agents course is finally out! + +👉 https://huggingface.co/agents-course + +This first unit of the course sets you up with all the fundamentals to become a pro in agents. + +- What's an AI Agent? +- What are LLMs? 
+- Messages and Special Tokens +- Understanding AI Agents through the Thought-Action-Observation Cycle +- Thought, Internal Reasoning and the Re-Act Approach +- Actions, Enabling the Agent to Engage with Its Environment +- Observe, Integrating Feedback to Reflect and Adapt",[],[],"[{'reaction': '🔥', 'users': ['akashthoriya1998', 'John6666', 'Youssef-c', 'herrhochhaus', 'ElTomo', 'prithivMLmods', 'sdiazlor', 'gungorbasa', 'Bapro', 'goldengate', 'FerrariFer', 'shbaydadaev', 'douglasWB', 'zhouzhangjian', 'thanhkt', 'shamikbosefj', 'annimukh', 'Jimlin94', 'mgogel', 'KeshavGubbi', 'Jeremy1997', 'arunsriraman91', 'nnennahacks', 'srinjoyMukherjee', 'onuralpszr', 'srijanjoshi', 'jparedesj', 'hojie11', 'nicholasjclamb', 'minhtuelun', 'dantezxcd'], 'count': 31}, {'reaction': '👍', 'users': ['krinal', 'shbaydadaev', 'dailywsx', 'douglasWB', 'YepItsJeremy', 'RaphaelBARTOUX', 'leyshr', 'fabio-garagiola', 'minhtuelun'], 'count': 9}, {'reaction': '🤗', 'users': ['John6666', 'zoe8888', 'plasmax7', 'shbaydadaev', 'douglasWB', 'minhtuelun'], 'count': 6}, {'reaction': '😎', 'users': ['John6666', 'Cryptoweak', 'shbaydadaev', 'douglasWB', 'minhtuelun'], 'count': 5}]",2025-02-11 07:45:52,2025-02-11 07:45:52.375,[],/posts/burtenshaw/457613029588941,9266,"{'language': 'en', 'probability': 0.8514158725738525}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1650745211725-noauth.png,55.0,Mohammed Hamdy,mmhamdy,809103223322610,"[{'type': 'text', 'value': '⛓ Evaluating Long Context #2: SCROLLS and ZeroSCROLLS', 'raw': '⛓ Evaluating Long Context #2: SCROLLS and ZeroSCROLLS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In this series of posts about tracing the history of long context evaluation, we started with Long Range Arena (LRA). Introduced in 2020, Long Range Arens (LRA) is one of the earliest benchmarks designed to tackle the challenge of long context evaluation. But it wasn't introduced to evaluate LLMs, but rather the transformer architecture in general."", 'raw': ""In this series of posts about tracing the history of long context evaluation, we started with Long Range Arena (LRA). Introduced in 2020, Long Range Arens (LRA) is one of the earliest benchmarks designed to tackle the challenge of long context evaluation. But it wasn't introduced to evaluate LLMs, but rather the transformer architecture in general.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📜 The SCROLLS benchmark, introduced in 2022, addresses this gap in NLP/LLM research. SCROLLS challenges models with tasks that require reasoning over extended sequences (according to 2022 standards). So, what does it offer?', 'raw': '📜 The SCROLLS benchmark, introduced in 2022, addresses this gap in NLP/LLM research. SCROLLS challenges models with tasks that require reasoning over extended sequences (according to 2022 standards). 
So, what does it offer?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""1️⃣ Long Text Focus: SCROLLS (unlike LRA) focus mainly on text and contain inputs with thousands of words, testing models' ability to synthesize information across lengthy documents."", 'raw': ""1️⃣ Long Text Focus: SCROLLS (unlike LRA) focus mainly on text and contain inputs with thousands of words, testing models' ability to synthesize information across lengthy documents.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Diverse Tasks: Includes summarization, question answering, and natural language inference across domains like literature, science, and business.', 'raw': '2️⃣ Diverse Tasks: Includes summarization, question answering, and natural language inference across domains like literature, science, and business.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Unified Format: All datasets are available in a text-to-text format, facilitating easy evaluation and comparison of models.', 'raw': '3️⃣ Unified Format: All datasets are available in a text-to-text format, facilitating easy evaluation and comparison of models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Building on SCROLLS, ZeroSCROLLS takes long text evaluation to the next level by focusing on zero-shot learning. Other features include:', 'raw': 'Building on SCROLLS, ZeroSCROLLS takes long text evaluation to the next level by focusing on zero-shot learning. Other features include:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ New Tasks: Introduces tasks like sentiment aggregation and sorting book chapter summaries.', 'raw': '1️⃣ New Tasks: Introduces tasks like sentiment aggregation and sorting book chapter summaries.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Leaderboard: A live leaderboard encourages continuous improvement and competition among researchers.', 'raw': '2️⃣ Leaderboard: A live leaderboard encourages continuous improvement and competition among researchers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 What are some other landmark benchmarks in the history of long context evaluation? Feel free to share your thoughts and suggestions in the comments.', 'raw': '💡 What are some other landmark benchmarks in the history of long context evaluation? 
Feel free to share your thoughts and suggestions in the comments.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SCROLLS Paper: ', 'raw': '- SCROLLS Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2201.03533'}, 'url': 'https://huggingface.co/papers/2201.03533', 'raw': 'https://huggingface.co/papers/2201.03533', 'label': 'SCROLLS: Standardized CompaRison Over Long Language Sequences (2201.03533)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ZeroSCROLLS Paper: ', 'raw': '- ZeroSCROLLS Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2305.14196'}, 'url': 'https://huggingface.co/papers/2305.14196', 'raw': 'https://huggingface.co/papers/2305.14196', 'label': 'ZeroSCROLLS: A Zero-Shot Benchmark for Long Text Understanding (2305.14196)'}]","⛓ Evaluating Long Context #2: SCROLLS and ZeroSCROLLS + +In this series of posts about tracing the history of long context evaluation, we started with Long Range Arena (LRA). Introduced in 2020, Long Range Arens (LRA) is one of the earliest benchmarks designed to tackle the challenge of long context evaluation. But it wasn't introduced to evaluate LLMs, but rather the transformer architecture in general. + +📜 The SCROLLS benchmark, introduced in 2022, addresses this gap in NLP/LLM research. SCROLLS challenges models with tasks that require reasoning over extended sequences (according to 2022 standards). So, what does it offer? + +1️⃣ Long Text Focus: SCROLLS (unlike LRA) focus mainly on text and contain inputs with thousands of words, testing models' ability to synthesize information across lengthy documents. +2️⃣ Diverse Tasks: Includes summarization, question answering, and natural language inference across domains like literature, science, and business. +3️⃣ Unified Format: All datasets are available in a text-to-text format, facilitating easy evaluation and comparison of models. + +Building on SCROLLS, ZeroSCROLLS takes long text evaluation to the next level by focusing on zero-shot learning. Other features include: + +1️⃣ New Tasks: Introduces tasks like sentiment aggregation and sorting book chapter summaries. +2️⃣ Leaderboard: A live leaderboard encourages continuous improvement and competition among researchers. + +💡 What are some other landmark benchmarks in the history of long context evaluation? Feel free to share your thoughts and suggestions in the comments. + +- SCROLLS Paper: https://huggingface.co/papers/2201.03533 +- ZeroSCROLLS Paper: https://huggingface.co/papers/2305.14196","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62645f88c39850dc093d6105/bg7P1eRs6epnL-QfxYM9J.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'innovation64', 'carl-xxxxl', 'nicolay-r', 'dantezxcd'], 'count': 5}, {'reaction': '👍', 'users': ['krinal', 'DrishtiSharma', 'fractalego'], 'count': 3}]",2025-02-11 05:55:10,2025-02-11 05:55:10.315,[],/posts/mmhamdy/809103223322610,2992,"{'language': 'en', 'probability': 0.8786360025405884}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,399265037013673,"[{'type': 'text', 'value': ""Raiden is here! 63k creative-reasoning and analytic-reasoning prompts answered by DeepSeek's 685b R1 model!"", 'raw': ""Raiden is here! 
63k creative-reasoning and analytic-reasoning prompts answered by DeepSeek's 685b R1 model!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- All prompts from ', 'raw': '- All prompts from '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'microsoft/orca-agentinstruct-1M-v1'}, 'url': 'https://huggingface.co/datasets/microsoft/orca-agentinstruct-1M-v1', 'raw': 'https://huggingface.co/datasets/microsoft/orca-agentinstruct-1M-v1'}, {'type': 'text', 'value': ' and all responses from ', 'raw': ' and all responses from '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'deepseek-ai/DeepSeek-R1'}, 'url': 'https://huggingface.co/deepseek-ai/DeepSeek-R1', 'raw': 'https://huggingface.co/deepseek-ai/DeepSeek-R1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- A deep look at R1's reasoning skills! Use as you will."", 'raw': ""- A deep look at R1's reasoning skills! Use as you will.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Get it now: ', 'raw': 'Get it now: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'sequelbox/Raiden-DeepSeek-R1'}, 'url': 'https://huggingface.co/datasets/sequelbox/Raiden-DeepSeek-R1', 'raw': 'https://huggingface.co/datasets/sequelbox/Raiden-DeepSeek-R1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'for everyone :)', 'raw': 'for everyone :)'}]","Raiden is here! 63k creative-reasoning and analytic-reasoning prompts answered by DeepSeek's 685b R1 model! + +- All prompts from https://huggingface.co/datasets/microsoft/orca-agentinstruct-1M-v1 and all responses from https://huggingface.co/deepseek-ai/DeepSeek-R1 +- A deep look at R1's reasoning skills! Use as you will. + +Get it now: https://huggingface.co/datasets/sequelbox/Raiden-DeepSeek-R1 + +for everyone :)",[],[],"[{'reaction': '👀', 'users': ['John6666', 'nicolay-r', 'J33L', 'dantezxcd'], 'count': 4}, {'reaction': '🧠', 'users': ['nicolay-r', 'Fishtiks', 'LiraMirui'], 'count': 3}, {'reaction': '👍', 'users': ['Mindweller', 'notbdq'], 'count': 2}, {'reaction': '🚀', 'users': ['zoeywin'], 'count': 1}, {'reaction': '🔥', 'users': ['NickyNicky'], 'count': 1}]",2025-02-11 05:13:08,2025-02-11 05:13:08.312,[],/posts/sequelbox/399265037013673,2827,"{'language': 'en', 'probability': 0.8100074529647827}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/673fd3588bb7a091b2c9f4f5/MFGRfiotAtoBWBxjuCCYI.png,20.0,Melons,retronic,506314907049610,"[{'type': 'text', 'value': 'The Colox idea is getting replaced with a clone of OpenAI Deep Research due to retraining issues and reasoning issues', 'raw': 'The Colox idea is getting replaced with a clone of OpenAI Deep Research due to retraining issues and reasoning issues'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""So now I am working on a Deep Research system with Ollama that will function like OpenAI's version for FREE! This will be a local alternative, no potato PC can handle this keep in mind."", 'raw': ""So now I am working on a Deep Research system with Ollama that will function like OpenAI's version for FREE! 
This will be a local alternative, no potato PC can handle this keep in mind.""}]","The Colox idea is getting replaced with a clone of OpenAI Deep Research due to retraining issues and reasoning issues + +So now I am working on a Deep Research system with Ollama that will function like OpenAI's version for FREE! This will be a local alternative, no potato PC can handle this keep in mind.",[],[],"[{'reaction': '👀', 'users': ['John6666', 'dantezxcd'], 'count': 2}, {'reaction': '👍', 'users': ['KasperLover'], 'count': 1}, {'reaction': '🤝', 'users': ['Mindweller'], 'count': 1}]",2025-02-11 01:21:12,2025-02-11 01:21:12.627,[],/posts/retronic/506314907049610,1830,"{'language': 'en', 'probability': 0.9082497954368591}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,354139618405592,"[{'type': 'text', 'value': 'Dearest None-yet Team,', 'raw': 'Dearest None-yet Team,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I couldn't help but notice that our productivity has room for improvement. To address this, we will be engaging in a company-wide morale-building activity designed to boost teamwork, enthusiasm, and *most importantly* results."", 'raw': ""I couldn't help but notice that our productivity has room for improvement. To address this, we will be engaging in a company-wide morale-building activity designed to boost teamwork, enthusiasm, and *most importantly* results.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I know you're all as excited as I am for this fun and absolutely required initiative. Participation is not just encouraged, it's mandatory. Think of it as a team-bonding experience you never signed up for but will absolutely tolerate."", 'raw': ""I know you're all as excited as I am for this fun and absolutely required initiative. Participation is not just encouraged, it's mandatory. Think of it as a team-bonding experience you never signed up for but will absolutely tolerate.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More details to follow, but for now, mark your calendars and prepare for an engaging experience that will definitely make us all better, stronger, and more synchronized, or at least give us something to talk about later.', 'raw': 'More details to follow, but for now, mark your calendars and prepare for an engaging experience that will definitely make us all better, stronger, and more synchronized, or at least give us something to talk about later.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Looking forward to seeing you all there!', 'raw': 'Looking forward to seeing you all there!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Best,', 'raw': 'Best,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Me', 'raw': 'Me'}]","Dearest None-yet Team, + +I couldn't help but notice that our productivity has room for improvement. To address this, we will be engaging in a company-wide morale-building activity designed to boost teamwork, enthusiasm, and *most importantly* results. + +I know you're all as excited as I am for this fun and absolutely required initiative. Participation is not just encouraged, it's mandatory. 
Think of it as a team-bonding experience you never signed up for but will absolutely tolerate. + +More details to follow, but for now, mark your calendars and prepare for an engaging experience that will definitely make us all better, stronger, and more synchronized, or at least give us something to talk about later. + +Looking forward to seeing you all there! + +Best, +Me",[],[],"[{'reaction': '👍', 'users': ['talumbau', 'John6666', 'IvandeMurard', 'Smorty100', 'dantezxcd'], 'count': 5}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-02-10 22:00:22,2025-02-15 17:19:14.054,"[{'_id': '643b19f8a856622f978df30f', 'avatarUrl': '/avatars/c82779fdf94f80cdb5020504f83c818b.svg', 'fullname': 'Yatharth Sharma', 'name': 'YaTharThShaRma999', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 20, 'isFollowing': False}, {'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}, {'_id': '66fe8fb27d722f0879b4631f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg', 'fullname': 'Sk md saad amin', 'name': 'Reality123b', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}]",/posts/nroggendorff/354139618405592,2667,"{'language': 'en', 'probability': 0.9625351428985596}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/y1W6Co4jIYB95Cx6Tjrsd.jpeg,62.0,Muhammad Imran Zaman,ImranzamanML,161215925152068,"[{'type': 'text', 'value': 'Hugging Face just launched the AI Agents Course – a free journey from beginner to expert in AI agents! ', 'raw': 'Hugging Face just launched the AI Agents Course – a free journey from beginner to expert in AI agents! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Learn AI Agent fundamentals, use cases and frameworks ', 'raw': '- Learn AI Agent fundamentals, use cases and frameworks '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Use top libraries like LangChain & LlamaIndex', 'raw': '- Use top libraries like LangChain & LlamaIndex'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Compete in challenges & earn a certificate', 'raw': '- Compete in challenges & earn a certificate'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Hands-on projects & real-world applications', 'raw': '- Hands-on projects & real-world applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/learn/agents-course/unit0/introduction', 'raw': 'https://huggingface.co/learn/agents-course/unit0/introduction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can join for a live Q&A on Feb 12 at 5PM CET to learn more about the course here ', 'raw': 'You can join for a live Q&A on Feb 12 at 5PM CET to learn more about the course here '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.youtube.com/live/PopqUt3MGyQ', 'raw': 'https://www.youtube.com/live/PopqUt3MGyQ'}]","Hugging Face just launched the AI Agents Course – a free journey from beginner to expert in AI agents! + +- Learn AI Agent fundamentals, use cases and frameworks +- Use top libraries like LangChain & LlamaIndex +- Compete in challenges & earn a certificate +- Hands-on projects & real-world applications + +https://huggingface.co/learn/agents-course/unit0/introduction + +You can join for a live Q&A on Feb 12 at 5PM CET to learn more about the course here + +https://www.youtube.com/live/PopqUt3MGyQ",[],[],"[{'reaction': '👍', 'users': ['nicolay-r', 'John6666', 'khuongduy1897', 'NorthernStar', 'HuggyMonkey', 'beta3', 'slavgtuk', 'KrisKale45', 'hirugohan', 'dantezxcd'], 'count': 10}, {'reaction': '😎', 'users': ['John6666', 'slavgtuk', 'louisbrulenaudet', 'dantezxcd'], 'count': 4}, {'reaction': '❤️', 'users': ['khuongduy1897', 'Fonk', 'Cptcoffee'], 'count': 3}, {'reaction': '🤗', 'users': ['John6666'], 'count': 1}]",2025-02-10 20:34:29,2025-02-10 20:34:29.062,[],/posts/ImranzamanML/161215925152068,3281,"{'language': 'en', 'probability': 0.7670823931694031}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,480730425842027,"[{'type': 'text', 'value': 'Today is a big day for the Arabic Language,', 'raw': 'Today is a big day for the Arabic Language,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We have ', 'raw': 'We have '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Navid-AI/The-Arabic-Rag-Leaderboard'}, 'url': 'https://huggingface.co/spaces/Navid-AI/The-Arabic-Rag-Leaderboard', 'raw': 'https://huggingface.co/spaces/Navid-AI/The-Arabic-Rag-Leaderboard'}, {'type': 'text', 'value': ',', 'raw': ','}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'an Update for ', 'raw': 'an Update for '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'OALL/Open-Arabic-LLM-Leaderboard'}, 'url': 
'https://huggingface.co/spaces/OALL/Open-Arabic-LLM-Leaderboard', 'raw': 'https://huggingface.co/spaces/OALL/Open-Arabic-LLM-Leaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and the release of ', 'raw': 'and the release of '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'atlasia/darija-chatbot-arena'}, 'url': 'https://huggingface.co/spaces/atlasia/darija-chatbot-arena', 'raw': 'https://huggingface.co/spaces/atlasia/darija-chatbot-arena'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All of this announcements was under 12 hours of time 🤯', 'raw': 'All of this announcements was under 12 hours of time 🤯'}]","Today is a big day for the Arabic Language, + +We have https://huggingface.co/spaces/Navid-AI/The-Arabic-Rag-Leaderboard, +an Update for https://huggingface.co/spaces/OALL/Open-Arabic-LLM-Leaderboard +and the release of https://huggingface.co/spaces/atlasia/darija-chatbot-arena + +All of this announcements was under 12 hours of time 🤯",[],[],"[{'reaction': '👍', 'users': ['hassenhamdi', 'muhammadahsanmemon', 'orixaowla', 'EdNolbed', 'C0casio45', 'installe', 'MohammedHamdy32', 'dantezxcd'], 'count': 8}, {'reaction': '🔥', 'users': ['MohamedRashad', 'John6666', 'dantezxcd'], 'count': 3}, {'reaction': '🧠', 'users': ['monsoon-nlp'], 'count': 1}]",2025-02-10 18:47:21,2025-02-10 18:47:42.402,[],/posts/MohamedRashad/480730425842027,3329,"{'language': 'en', 'probability': 0.8611674308776855}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,475030803757693,"[{'type': 'text', 'value': 'Agentic RAG: Applied, visual, and step-by-step! 🐾', 'raw': 'Agentic RAG: Applied, visual, and step-by-step! 🐾'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Get familiar with the Agents and tools, not the bells and whistles! ', 'raw': 'Get familiar with the Agents and tools, not the bells and whistles! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Retrieve - Augment and now GENERATE.', 'raw': 'Retrieve - Augment and now GENERATE.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'part 3: ', 'raw': 'part 3: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/davidberenstein1957/ai-blueprint-agentic-rag-part-3-generate', 'raw': 'https://huggingface.co/blog/davidberenstein1957/ai-blueprint-agentic-rag-part-3-generate'}]","Agentic RAG: Applied, visual, and step-by-step! 🐾 + +Get familiar with the Agents and tools, not the bells and whistles! + +Retrieve - Augment and now GENERATE. 
+ +part 3: https://huggingface.co/blog/davidberenstein1957/ai-blueprint-agentic-rag-part-3-generate",[],[],"[{'reaction': '👍', 'users': ['makinuh', 'John6666', 'dai', 'djuna', 'Minami-su', 'dantezxcd'], 'count': 6}]",2025-02-06 09:50:15,2025-02-06 09:50:15.265,[],/posts/davidberenstein1957/475030803757693,2085,"{'language': 'en', 'probability': 0.596152663230896}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,274640196538235,"[{'type': 'text', 'value': 'VisoMaster (newest Open Source SOTA 0-shot Face Swap / Deep Fake APP) Tutorial and 1-Click Windows and Linux (Massed Compute) Installers', 'raw': 'VisoMaster (newest Open Source SOTA 0-shot Face Swap / Deep Fake APP) Tutorial and 1-Click Windows and Linux (Massed Compute) Installers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1-Click Installers > ', 'raw': '1-Click Installers > '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/121570322', 'raw': 'https://www.patreon.com/posts/121570322'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For requirements follow this video : ', 'raw': 'For requirements follow this video : '}, {'type': 'link', 'href': 'https://youtu.be/DrhUHnYfwC0', 'raw': 'https://youtu.be/DrhUHnYfwC0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow below screenshots to learn how to use the APP', 'raw': 'Follow below screenshots to learn how to use the APP'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hopefully will make a tutorial video too', 'raw': 'Hopefully will make a tutorial video too'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'VisoMaster', 'raw': 'VisoMaster'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'VisoMaster is a powerful yet easy-to-use tool for face swapping and editing in images and videos. It utilizes AI to produce natural-looking results with minimal effort, making it ideal for both casual users and professionals.', 'raw': 'VisoMaster is a powerful yet easy-to-use tool for face swapping and editing in images and videos. 
It utilizes AI to produce natural-looking results with minimal effort, making it ideal for both casual users and professionals.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Features', 'raw': 'Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ High-quality AI-powered face swapping for images and videos', 'raw': '✅ High-quality AI-powered face swapping for images and videos'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Easy-to-use interface with simple controls', 'raw': '✅ Easy-to-use interface with simple controls'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Supports multiple formats for input and output', 'raw': '✅ Supports multiple formats for input and output'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Efficient processing with GPU acceleration (CUDA support)', 'raw': '✅ Efficient processing with GPU acceleration (CUDA support)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Customizable models and fine-tuning options', 'raw': '✅ Customizable models and fine-tuning options'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Official REPO : ', 'raw': 'Official REPO : '}, {'type': 'link', 'href': 'https://github.com/visomaster/VisoMaster', 'raw': 'https://github.com/visomaster/VisoMaster'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can also install via official repo', 'raw': 'You can also install via official repo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check below screenshots to learn how to use', 'raw': 'Check below screenshots to learn how to use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","VisoMaster (newest Open Source SOTA 0-shot Face Swap / Deep Fake APP) Tutorial and 1-Click Windows and Linux (Massed Compute) Installers + +1-Click Installers > https://www.patreon.com/posts/121570322 + +For requirements follow this video : https://youtu.be/DrhUHnYfwC0 + +Follow below screenshots to learn how to use the APP + +Hopefully will make a tutorial video too + +VisoMaster +VisoMaster is a powerful yet easy-to-use tool for face swapping and editing in images and videos. It utilizes AI to produce natural-looking results with minimal effort, making it ideal for both casual users and professionals. 
+ +Features +✅ High-quality AI-powered face swapping for images and videos +✅ Easy-to-use interface with simple controls +✅ Supports multiple formats for input and output +✅ Efficient processing with GPU acceleration (CUDA support) +✅ Customizable models and fine-tuning options + +Official REPO : https://github.com/visomaster/VisoMaster + +You can also install via official repo + +Check below screenshots to learn how to use + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/-NfQ-vSSbWTyibZkkd_DQ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/nyEeyLRSsC_WxinSD_0bQ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/HMrng7wSk9owS2a4sWkz_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/TxnSCWMd-EFfcqfKeOlTG.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Z3wD2mJSiBKGBVgzN4Oj1.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/ur8cky8lx2dFdxaWSo5fr.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/xIt3gRWallVIZbQTGHIYn.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/zqesb6bY-W4FFkh3RXfVN.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/xuqGtNFpwIsDidSH8_V2m.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/j2QDd8stfPDxHQbUVhWT3.png'}]",[],"[{'reaction': '😎', 'users': ['MonsterMMORPG', 'umair894', 'dantezxcd'], 'count': 3}, {'reaction': '🔥', 'users': ['MonsterMMORPG', 'mattinla'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'dantezxcd'], 'count': 2}, {'reaction': '👀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '❤️', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-02-06 01:16:50,2025-02-10 15:07:45.025,[],/posts/MonsterMMORPG/274640196538235,3214,"{'language': 'en', 'probability': 0.7684729695320129}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png,1048.0,Hexgrad,hexgrad,562263062112849,"[{'type': 'text', 'value': 'I wrote an article about G2P: ', 'raw': 'I wrote an article about G2P: '}, {'type': 'link', 'href': 'https://hf.co/blog/hexgrad/g2p', 'raw': 'https://hf.co/blog/hexgrad/g2p'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'G2P is an underrated piece of small TTS models, like offensive linemen who do a bunch of work and get no credit.', 'raw': 'G2P is an underrated piece of small TTS models, like offensive linemen who do a bunch of work and get no credit.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instead of relying on explicit G2P, larger speech models implicitly learn this task by eating many thousands of hours of audio data. 
They often use a 500M+ parameter LLM at the front to predict latent audio tokens over a learned codebook, then decode these tokens into audio.', 'raw': 'Instead of relying on explicit G2P, larger speech models implicitly learn this task by eating many thousands of hours of audio data. They often use a 500M+ parameter LLM at the front to predict latent audio tokens over a learned codebook, then decode these tokens into audio.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kokoro instead relies on G2P preprocessing, is 82M parameters, and thus needs less audio to learn. Because of this, we can cherrypick high fidelity audio for training data, and deliver solid speech for those voices. In turn, this excellent audio quality & lack of background noise helps explain why Kokoro is very competitive in single-voice TTS Arenas.', 'raw': 'Kokoro instead relies on G2P preprocessing, is 82M parameters, and thus needs less audio to learn. Because of this, we can cherrypick high fidelity audio for training data, and deliver solid speech for those voices. In turn, this excellent audio quality & lack of background noise helps explain why Kokoro is very competitive in single-voice TTS Arenas.'}]","I wrote an article about G2P: https://hf.co/blog/hexgrad/g2p + +G2P is an underrated piece of small TTS models, like offensive linemen who do a bunch of work and get no credit. + +Instead of relying on explicit G2P, larger speech models implicitly learn this task by eating many thousands of hours of audio data. They often use a 500M+ parameter LLM at the front to predict latent audio tokens over a learned codebook, then decode these tokens into audio. + +Kokoro instead relies on G2P preprocessing, is 82M parameters, and thus needs less audio to learn. Because of this, we can cherrypick high fidelity audio for training data, and deliver solid speech for those voices. In turn, this excellent audio quality & lack of background noise helps explain why Kokoro is very competitive in single-voice TTS Arenas.",[],[],"[{'reaction': '👍', 'users': ['fastpath', 'victor', 'BennyFace', 'krinal', 'spawn08', 'Jerboas86', 'whooray', 'Python2231', 'John6666', 'rocca', 'afrideva', 'ogchen', 'qingy2024', 'GordonChang', 'KingNish', 'srushti335', 'romanheinrich', 'Pendrokar', 'Ayedunno', 'dantezxcd'], 'count': 20}, {'reaction': '🔥', 'users': ['Spestly', 'dantezxcd'], 'count': 2}]",2025-02-05 20:32:16,2025-02-10 15:07:45.024,[],/posts/hexgrad/562263062112849,5873,"{'language': 'en', 'probability': 0.9041698575019836}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,450880198408916,"[{'type': 'text', 'value': 'Exciting Research Alert: Remining Hard Negatives for Domain Adaptation in Dense Retrieval', 'raw': 'Exciting Research Alert: Remining Hard Negatives for Domain Adaptation in Dense Retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Researchers from the University of Amsterdam have introduced R-GPL, an innovative approach to improve domain adaptation in dense retrievers. The technique enhances the existing GPL (Generative Pseudo Labeling) framework by continuously remining hard negatives during the training process.', 'raw': 'Researchers from the University of Amsterdam have introduced R-GPL, an innovative approach to improve domain adaptation in dense retrievers. 
The technique enhances the existing GPL (Generative Pseudo Labeling) framework by continuously remining hard negatives during the training process.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Technical Insights:', 'raw': 'Key Technical Insights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The method leverages domain-adapted models to mine higher quality hard negatives incrementally every 30,000 steps during training', 'raw': '- The method leverages domain-adapted models to mine higher quality hard negatives incrementally every 30,000 steps during training'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Uses MarginMSE loss for training with data triplets (Query, Relevant Doc, Hard Negative Doc)', 'raw': '- Uses MarginMSE loss for training with data triplets (Query, Relevant Doc, Hard Negative Doc)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Implements mean pooling over hidden states for dense representations with 350 token sequence length', 'raw': '- Implements mean pooling over hidden states for dense representations with 350 token sequence length'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Combines query generation with pseudo-labels from cross-encoder models', 'raw': '- Combines query generation with pseudo-labels from cross-encoder models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Performance Highlights:', 'raw': 'Performance Highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Outperforms baseline GPL in 13/14 BEIR datasets', 'raw': '- Outperforms baseline GPL in 13/14 BEIR datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Shows significant improvements in 9/12 LoTTE datasets', 'raw': '- Shows significant improvements in 9/12 LoTTE datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Achieves remarkable 4.4 point gain on TREC-COVID dataset', 'raw': '- Achieves remarkable 4.4 point gain on TREC-COVID dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Under the Hood:', 'raw': 'Under the Hood:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The system continuously refreshes hard negatives using the model undergoing domain adaptation. This creates a feedback loop where the model gets better at identifying relevant documents in the target domain, leading to higher quality training signals.', 'raw': 'The system continuously refreshes hard negatives using the model undergoing domain adaptation. This creates a feedback loop where the model gets better at identifying relevant documents in the target domain, leading to higher quality training signals.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Analysis reveals that domain-adapted models retrieve documents with higher relevancy scores in top-100 hard negatives compared to baseline approaches. This confirms the model's enhanced capability to identify challenging but informative training examples."", 'raw': ""Analysis reveals that domain-adapted models retrieve documents with higher relevancy scores in top-100 hard negatives compared to baseline approaches. 
This confirms the model's enhanced capability to identify challenging but informative training examples.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This research opens new possibilities for efficient dense retrieval systems that can adapt to different domains without requiring labeled training data.', 'raw': 'This research opens new possibilities for efficient dense retrieval systems that can adapt to different domains without requiring labeled training data.'}]","Exciting Research Alert: Remining Hard Negatives for Domain Adaptation in Dense Retrieval + +Researchers from the University of Amsterdam have introduced R-GPL, an innovative approach to improve domain adaptation in dense retrievers. The technique enhances the existing GPL (Generative Pseudo Labeling) framework by continuously remining hard negatives during the training process. + +Key Technical Insights: +- The method leverages domain-adapted models to mine higher quality hard negatives incrementally every 30,000 steps during training +- Uses MarginMSE loss for training with data triplets (Query, Relevant Doc, Hard Negative Doc) +- Implements mean pooling over hidden states for dense representations with 350 token sequence length +- Combines query generation with pseudo-labels from cross-encoder models + +Performance Highlights: +- Outperforms baseline GPL in 13/14 BEIR datasets +- Shows significant improvements in 9/12 LoTTE datasets +- Achieves remarkable 4.4 point gain on TREC-COVID dataset + +Under the Hood: +The system continuously refreshes hard negatives using the model undergoing domain adaptation. This creates a feedback loop where the model gets better at identifying relevant documents in the target domain, leading to higher quality training signals. + +Analysis reveals that domain-adapted models retrieve documents with higher relevancy scores in top-100 hard negatives compared to baseline approaches. This confirms the model's enhanced capability to identify challenging but informative training examples. 
+ +This research opens new possibilities for efficient dense retrieval systems that can adapt to different domains without requiring labeled training data.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/qhq6kQFAitah1cRSNYm52.jpeg'}]",[],"[{'reaction': '🧠', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-02-05 18:17:51,2025-02-05 18:17:51.186,[],/posts/singhsidhukuldeep/450880198408916,1029,"{'language': 'en', 'probability': 0.8452136516571045}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d594d99621f3f1b14d776d/ntGadxUU7_iYsF09pNbcX.jpeg,26.0,Shukdev Datta,shukdevdatta123,749905382538636,"[{'type': 'text', 'value': 'Introducing Kokoro TTS Translate For All users:', 'raw': 'Introducing Kokoro TTS Translate For All users:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'shukdevdatta123/Kokoro-TTS'}, 'url': 'https://huggingface.co/spaces/shukdevdatta123/Kokoro-TTS', 'raw': 'https://huggingface.co/spaces/shukdevdatta123/Kokoro-TTS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1DIpBzJSBBeTcpkyxkHcpngLumMapEWQz?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1DIpBzJSBBeTcpkyxkHcpngLumMapEWQz?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(colab link for GPU access)', 'raw': '(colab link for GPU access)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our Streamlit application provides a text-to-speech conversion tool using the Kokoro library, allowing users to input text, select language and voice, and adjust speech speed. The generated audio can be played or downloaded as a WAV file. Optionally, an OpenAI API key enables text translation to English, with subsequent speech generation for both the original and translated text. This functionality, along with helpful instructions and sample prompts, positions the application for various business opportunities. It can be offered as a SaaS platform with tiered subscriptions for access to features like diverse voices, languages, and translation. Target markets include content creators, language learning platforms, accessibility tools, and businesses needing automated voice responses. Further revenue streams can be generated through API integration with other applications, custom voice creation or cloning services, and affiliate marketing with related services.', 'raw': 'Our Streamlit application provides a text-to-speech conversion tool using the Kokoro library, allowing users to input text, select language and voice, and adjust speech speed. The generated audio can be played or downloaded as a WAV file. Optionally, an OpenAI API key enables text translation to English, with subsequent speech generation for both the original and translated text. This functionality, along with helpful instructions and sample prompts, positions the application for various business opportunities. It can be offered as a SaaS platform with tiered subscriptions for access to features like diverse voices, languages, and translation. Target markets include content creators, language learning platforms, accessibility tools, and businesses needing automated voice responses. 
Further revenue streams can be generated through API integration with other applications, custom voice creation or cloning services, and affiliate marketing with related services.'}]","Introducing Kokoro TTS Translate For All users: + +https://huggingface.co/spaces/shukdevdatta123/Kokoro-TTS + +https://colab.research.google.com/drive/1DIpBzJSBBeTcpkyxkHcpngLumMapEWQz?usp=sharing + +(colab link for GPU access) + +Our Streamlit application provides a text-to-speech conversion tool using the Kokoro library, allowing users to input text, select language and voice, and adjust speech speed. The generated audio can be played or downloaded as a WAV file. Optionally, an OpenAI API key enables text translation to English, with subsequent speech generation for both the original and translated text. This functionality, along with helpful instructions and sample prompts, positions the application for various business opportunities. It can be offered as a SaaS platform with tiered subscriptions for access to features like diverse voices, languages, and translation. Target markets include content creators, language learning platforms, accessibility tools, and businesses needing automated voice responses. Further revenue streams can be generated through API integration with other applications, custom voice creation or cloning services, and affiliate marketing with related services.",[],[],"[{'reaction': '👍', 'users': ['krinal', 'Fishtiks', 'John6666', 'dantezxcd'], 'count': 4}]",2025-02-05 18:04:55,2025-02-10 15:07:45.025,[],/posts/shukdevdatta123/749905382538636,1780,"{'language': 'en', 'probability': 0.8606204390525818}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,234944926747608,"[{'type': 'text', 'value': 'Xwen 🔥 a series of open models based on Qwen2.5 models, developed by a brilliant research team of PhD students from the Chinese community. ', 'raw': 'Xwen 🔥 a series of open models based on Qwen2.5 models, developed by a brilliant research team of PhD students from the Chinese community. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'shenzhi-wang/xwen-chat-679e30ab1f4b90cfa7dbc49e'}, 'url': 'https://huggingface.co/collections/shenzhi-wang/xwen-chat-679e30ab1f4b90cfa7dbc49e', 'raw': 'https://huggingface.co/collections/shenzhi-wang/xwen-chat-679e30ab1f4b90cfa7dbc49e'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ 7B/72B', 'raw': '✨ 7B/72B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Apache 2.0', 'raw': '✨ Apache 2.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Xwen-72B-Chat outperformed DeepSeek V3 on Arena Hard Auto', 'raw': '✨ Xwen-72B-Chat outperformed DeepSeek V3 on Arena Hard Auto'}]","Xwen 🔥 a series of open models based on Qwen2.5 models, developed by a brilliant research team of PhD students from the Chinese community. 
+https://huggingface.co/collections/shenzhi-wang/xwen-chat-679e30ab1f4b90cfa7dbc49e +✨ 7B/72B +✨ Apache 2.0 +✨ Xwen-72B-Chat outperformed DeepSeek V3 on Arena Hard Auto","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/pxl9R6EOqZEPoXzf05QAb.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['AdinaY', 'rsparmaksiz', 'UICO', 'get2choks', 'John6666', 'amirhoseinnaderali', 'hiyouga', 'lakazerkk', 'hsqqing', 'dantezxcd', 'ryanferds'], 'count': 11}, {'reaction': '🚀', 'users': ['Winnougan', 'arshiaafshani', 'dantezxcd', 'ryanferds'], 'count': 4}]",2025-02-05 17:39:04,2025-02-05 17:39:04.083,[],/posts/AdinaY/234944926747608,3126,"{'language': 'en', 'probability': 0.8391921520233154}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65831342f9c5cda913df366a/h7MLYt--shRYQj4-q5XmR.jpeg,18.0,Lin Tan,lin-tan,763019179488759,"[{'type': 'text', 'value': '🚀 Excited to share that our paper, ""SELP: Generating Safe and Efficient Task Plans for Robot Agents with Large Language Models"", has been accepted to #ICRA2025! 🔗 Preprint: ', 'raw': '🚀 Excited to share that our paper, ""SELP: Generating Safe and Efficient Task Plans for Robot Agents with Large Language Models"", has been accepted to #ICRA2025! 🔗 Preprint: '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2409.19471', 'raw': 'https://arxiv.org/pdf/2409.19471'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We introduce SELP (Safe Efficient LLM Planner), a novel approach for generating plans that adhere to user-specified constraints while optimizing for time-efficient execution. By leveraging linear temporal logic (LTL) to interpret natural language commands, SELP effectively handles complex commands and long-horizon tasks. 🤖', 'raw': 'We introduce SELP (Safe Efficient LLM Planner), a novel approach for generating plans that adhere to user-specified constraints while optimizing for time-efficient execution. By leveraging linear temporal logic (LTL) to interpret natural language commands, SELP effectively handles complex commands and long-horizon tasks. 🤖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡SELP presents three key insights:', 'raw': '💡SELP presents three key insights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ Equivalence Voting: Ensures robust translations from natural language instructions into LTL specifications.', 'raw': '1️⃣ Equivalence Voting: Ensures robust translations from natural language instructions into LTL specifications.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Constrained Decoding: Uses the generated LTL formula to guide the autoregressive inference of plans, ensuring the generated plans conform to the LTL.', 'raw': '2️⃣ Constrained Decoding: Uses the generated LTL formula to guide the autoregressive inference of plans, ensuring the generated plans conform to the LTL.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Domain-Specific Fine-Tuning: Customizes LLMs for specific robotic tasks, boosting both safety and efficiency.', 'raw': '3️⃣ Domain-Specific Fine-Tuning: Customizes LLMs for specific robotic tasks, boosting both safety and efficiency.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Experiment: Our experiments demonstrate SELP’s effectiveness and generalizability across diverse tasks. 
In drone navigation, SELP outperforms state-of-the-art LLM planners by 10.8% in safety rate and by 19.8% in plan efficiency. For robot manipulation, SELP achieves a 20.4% improvement in safety rate.', 'raw': '📊 Experiment: Our experiments demonstrate SELP’s effectiveness and generalizability across diverse tasks. In drone navigation, SELP outperforms state-of-the-art LLM planners by 10.8% in safety rate and by 19.8% in plan efficiency. For robot manipulation, SELP achieves a 20.4% improvement in safety rate.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'yiwu', 'raw': '@yiwu'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'jiang719', 'raw': '@jiang719'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#ICRA2025 #LLM #Robotics #Agent #LLMPlanner', 'raw': '#ICRA2025 #LLM #Robotics #Agent #LLMPlanner'}, {'type': 'new_line', 'raw': '\n'}]","🚀 Excited to share that our paper, ""SELP: Generating Safe and Efficient Task Plans for Robot Agents with Large Language Models"", has been accepted to #ICRA2025! 🔗 Preprint: https://arxiv.org/pdf/2409.19471 + +We introduce SELP (Safe Efficient LLM Planner), a novel approach for generating plans that adhere to user-specified constraints while optimizing for time-efficient execution. By leveraging linear temporal logic (LTL) to interpret natural language commands, SELP effectively handles complex commands and long-horizon tasks. 🤖 + +💡SELP presents three key insights: +1️⃣ Equivalence Voting: Ensures robust translations from natural language instructions into LTL specifications. +2️⃣ Constrained Decoding: Uses the generated LTL formula to guide the autoregressive inference of plans, ensuring the generated plans conform to the LTL. +3️⃣ Domain-Specific Fine-Tuning: Customizes LLMs for specific robotic tasks, boosting both safety and efficiency. + +📊 Experiment: Our experiments demonstrate SELP’s effectiveness and generalizability across diverse tasks. In drone navigation, SELP outperforms state-of-the-art LLM planners by 10.8% in safety rate and by 19.8% in plan efficiency. For robot manipulation, SELP achieves a 20.4% improvement in safety rate. 
+ +@yiwu @jiang719 + +#ICRA2025 #LLM #Robotics #Agent #LLMPlanner +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65831342f9c5cda913df366a/IgpvM0pPAKxeEbqKFTx0b.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65831342f9c5cda913df366a/LibksExQGbv7uUZ-01w55.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65831342f9c5cda913df366a/MxxQZzAxVIcvEGOIxm1_-.png'}]","[{'_id': '629e4ca2f2bda18349b6d330', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/629e4ca2f2bda18349b6d330/gSnGTLm2ugpINECylbuuQ.jpeg', 'fullname': 'Nan Jiang', 'name': 'jiang719', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}, {'_id': '62c88b04ab9c23f5c459ed90', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62c88b04ab9c23f5c459ed90/tEaeuKpXdXwqK-zq1H-8a.png', 'fullname': 'Yi Wu', 'name': 'yiwu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '🔥', 'users': ['lin-tan', 'yiwu', 'AdinaY', 'rsparmaksiz', 'kakamon', 'fireblade2534', 'centavosx', 'czczup', 'John6666', 'dantezxcd'], 'count': 10}, {'reaction': '👍', 'users': ['SandhuJaspreet', 'n-ate', 'lin-tan', 'dantezxcd'], 'count': 4}]",2025-02-05 17:02:09,2025-04-26 15:00:42.154,"[{'_id': '65831342f9c5cda913df366a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65831342f9c5cda913df366a/h7MLYt--shRYQj4-q5XmR.jpeg', 'fullname': 'Lin Tan', 'name': 'lin-tan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 18, 'isFollowing': False}]",/posts/lin-tan/763019179488759,3339,"{'language': 'en', 'probability': 0.8170629143714905}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,794560065961999,"[{'type': 'text', 'value': '📱 UI Navigation Corpus - ', 'raw': '📱 UI Navigation Corpus - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'teleren/ui-navigation-corpus'}, 'url': 'https://huggingface.co/datasets/teleren/ui-navigation-corpus', 'raw': 'https://huggingface.co/datasets/teleren/ui-navigation-corpus'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A comprehensive collection of mobile and web UI elements created by a new member of the Hugging Face community ', 'raw': 'A comprehensive collection of mobile and web UI elements created by a new member of the Hugging Face community '}, {'type': 'mention', 'user': 'teleren', 'raw': '@teleren'}, {'type': 'text', 'value': "". I'm glad that I was able to provide a little help together with "", 'raw': "". 
I'm glad that I was able to provide a little help together with ""}, {'type': 'mention', 'user': 'its5Q', 'raw': '@its5Q'}, {'type': 'text', 'value': ' to get this dataset published.', 'raw': ' to get this dataset published.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This dataset contains:', 'raw': 'This dataset contains:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Screenshots and recordings of mobile (iOS/Android) and web interfaces', 'raw': '- Screenshots and recordings of mobile (iOS/Android) and web interfaces'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- UI navigation annotations and metadata ', 'raw': '- UI navigation annotations and metadata '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Screen categorization tags and text extractions', 'raw': '- Screen categorization tags and text extractions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Navigation paths and screen relationships', 'raw': '- Navigation paths and screen relationships'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Version control for UI imagery', 'raw': '- Version control for UI imagery'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Perfect for training UI navigation agents and understanding interface patterns. The dataset provides detailed annotations linking screens, sections, and navigation flows together.', 'raw': 'Perfect for training UI navigation agents and understanding interface patterns. The dataset provides detailed annotations linking screens, sections, and navigation flows together.'}]","📱 UI Navigation Corpus - https://huggingface.co/datasets/teleren/ui-navigation-corpus + +A comprehensive collection of mobile and web UI elements created by a new member of the Hugging Face community @teleren. I'm glad that I was able to provide a little help together with @its5Q to get this dataset published. + +This dataset contains: +- Screenshots and recordings of mobile (iOS/Android) and web interfaces +- UI navigation annotations and metadata +- Screen categorization tags and text extractions +- Navigation paths and screen relationships +- Version control for UI imagery + +Perfect for training UI navigation agents and understanding interface patterns. The dataset provides detailed annotations linking screens, sections, and navigation flows together.",[],"[{'_id': '61018047db2cfe014946a8ce', 'avatarUrl': '/avatars/a692e2e2a3b0222e2f8cdfc44ac8d64c.svg', 'fullname': 'its5Q', 'name': 'its5Q', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29}, {'_id': '670fef37e368118f411c33b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/670fef37e368118f411c33b5/qKBSx7lIrHoh8CtXETeRh.png', 'fullname': 'fg', 'name': 'teleren', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '👍', 'users': ['holooo', 'teleren', 'kakamon', 'sudanenator', 'Zmu', 'John6666', 'Metinhsimi', 'dantezxcd'], 'count': 8}]",2025-02-05 16:02:14,2025-02-05 16:02:14.473,[],/posts/nyuuzyou/794560065961999,2482,"{'language': 'en', 'probability': 0.8092094659805298}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg,29.0,Sk md saad amin,Reality123b,618233091113088,"[{'type': 'text', 'value': 'Introducing Xylaria! 
', 'raw': 'Introducing Xylaria! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'you can try it here ', 'raw': 'you can try it here '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Lap1official/Xylaria-chat'}, 'url': 'https://huggingface.co/spaces/Lap1official/Xylaria-chat', 'raw': 'https://huggingface.co/spaces/Lap1official/Xylaria-chat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'or, for the version with the bad UI but with metacognition, image generation, and image upload, here:', 'raw': 'or, for the version with the bad UI but with metacognition, image generation, and image upload, here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Lap1official/API'}, 'url': 'https://huggingface.co/spaces/Lap1official/API', 'raw': 'https://huggingface.co/spaces/Lap1official/API'}]","Introducing Xylaria! + +you can try it here +https://huggingface.co/spaces/Lap1official/Xylaria-chat +or, for the version with the bad UI but with metacognition, image generation, and image upload, here: +https://huggingface.co/spaces/Lap1official/API",[],[],"[{'reaction': '👍', 'users': ['John6666', 'dantezxcd'], 'count': 2}]",2025-02-05 15:40:54,2025-02-05 16:15:09.824,[],/posts/Reality123b/618233091113088,602,"{'language': 'en', 'probability': 0.8169363737106323}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/63bec50287619d1458c734d6/zIFfwPo8vxRZX9PYiUX7y.jpeg,12.0,Davide Cifarelli,davide221,489720857505796,"[{'type': 'text', 'value': 'I have just released Klarity, an open-source library that analyzes the entropy (both raw and semantic) of language model outputs. ', 'raw': 'I have just released Klarity, an open-source library that analyzes the entropy (both raw and semantic) of language model outputs. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The library uses a second model to generate JSON reports containing detailed analysis and insights, allowing you to better understand areas of uncertainty and decision making in the main model.', 'raw': 'The library uses a second model to generate JSON reports containing detailed analysis and insights, allowing you to better understand areas of uncertainty and decision making in the main model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you would like to test the library on HF models or give feedback, you are welcome!', 'raw': 'If you would like to test the library on HF models or give feedback, you are welcome!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Repo: ', 'raw': 'Repo: '}, {'type': 'link', 'href': 'https://github.com/klara-research/klarity', 'raw': 'https://github.com/klara-research/klarity'}]","I have just released Klarity, an open-source library that analyzes the entropy (both raw and semantic) of language model outputs. + +The library uses a second model to generate JSON reports containing detailed analysis and insights, allowing you to better understand areas of uncertainty and decision making in the main model. +If you would like to test the library on HF models or give feedback, you are welcome! 
+ +Repo: https://github.com/klara-research/klarity",[],[],"[{'reaction': '🔥', 'users': ['dantezxcd'], 'count': 1}]",2025-02-05 12:55:01,2025-02-06 10:14:49.267,[],/posts/davide221/489720857505796,814,"{'language': 'en', 'probability': 0.9198662638664246}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63c09b32dd793d5a62895a95/SFdGQeiZpD5oxkl66wK2u.jpeg,48.0,Duskfall Crew,Duskfallcrew,750782980525301,"[{'type': 'text', 'value': ""I have too many articles I need to post now that i've left a certain site xD."", 'raw': ""I have too many articles I need to post now that i've left a certain site xD.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I\'m not copying ALL my articles over, just the really cool ones that I got to write because I have a design degree that means nothing in ML, and while I can\'t really give ML research a go totally? I can at least apply my ""I CAN MAKE PRETTY PICTURES"" knowledge to some academic theory! Watch this space, i\'ll be porting my Design Theory for AI articles over soon! <3 ', 'raw': 'I\'m not copying ALL my articles over, just the really cool ones that I got to write because I have a design degree that means nothing in ML, and while I can\'t really give ML research a go totally? I can at least apply my ""I CAN MAKE PRETTY PICTURES"" knowledge to some academic theory! Watch this space, i\'ll be porting my Design Theory for AI articles over soon! <3 '}]","I have too many articles I need to post now that i've left a certain site xD. +I'm not copying ALL my articles over, just the really cool ones that I got to write because I have a design degree that means nothing in ML, and while I can't really give ML research a go totally? I can at least apply my ""I CAN MAKE PRETTY PICTURES"" knowledge to some academic theory! Watch this space, i'll be porting my Design Theory for AI articles over soon! <3 ",[],[],"[{'reaction': '👀', 'users': ['John6666', 'joseph-bou'], 'count': 2}, {'reaction': '🚀', 'users': ['John6666', 'dantezxcd'], 'count': 2}, {'reaction': '👍', 'users': ['iweavings'], 'count': 1}]",2025-02-02 05:36:09,2025-02-02 05:36:09.096,[],/posts/Duskfallcrew/750782980525301,544,"{'language': 'en', 'probability': 0.961740255355835}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/665fef5a4794222f6a2fe605/sUF9NsMRNxdiEYKJbIbCk.jpeg,103.0,sometimesanotion,sometimesanotion,170152327285681,"[{'type': 'text', 'value': '**Update** Either I had some wrong numbers plugged in to estimate benchmark numbers from comparator, or the benchmark changed. Virtuoso Small v2 at 41.07 average is still very impressive, especially for writing draft copy for business purposes, while Lamarck remains a chatty generalist-reasoning model.', 'raw': '**Update** Either I had some wrong numbers plugged in to estimate benchmark numbers from comparator, or the benchmark changed. 
Virtuoso Small v2 at 41.07 average is still very impressive, especially for writing draft copy for business purposes, while Lamarck remains a chatty generalist-reasoning model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've felt confident that 14B Qwen finetunes and merges could break the 42.0 average, and Arcee **came close** with "", 'raw': ""I've felt confident that 14B Qwen finetunes and merges could break the 42.0 average, and Arcee **came close** with ""}, {'type': 'link', 'href': 'https://huggingface.co/arcee-ai/Virtuoso-Small-2', 'raw': 'https://huggingface.co/arcee-ai/Virtuoso-Small-2'}, {'type': 'text', 'value': '. Congratulations to ', 'raw': '. Congratulations to '}, {'type': 'mention', 'user': 'arcee-ai', 'raw': '@arcee-ai'}, {'type': 'text', 'value': '!', 'raw': '!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just two months ago, it was easy to think that 14B had plateaued, that you could have high IFEVAL or high MUSR/MATH/GPQA at 14B, but not both. That barrier is completely shattered. I see a pathway to even better, and Virtuoso Small 2 is a big part of why. Very impressive work. This community would expect no less from Arcee.', 'raw': 'Just two months ago, it was easy to think that 14B had plateaued, that you could have high IFEVAL or high MUSR/MATH/GPQA at 14B, but not both. That barrier is completely shattered. I see a pathway to even better, and Virtuoso Small 2 is a big part of why. Very impressive work. This community would expect no less from Arcee.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just look at this graph! Keep in mind, my merges here build on the first Virtuoso Small, and *-DS merges build on DeepSeek R1. There are some impressive merges in the pipe!', 'raw': 'Just look at this graph! Keep in mind, my merges here build on the first Virtuoso Small, and *-DS merges build on DeepSeek R1. There are some impressive merges in the pipe!'}]","**Update** Either I had some wrong numbers plugged in to estimate benchmark numbers from comparator, or the benchmark changed. Virtuoso Small v2 at 41.07 average is still very impressive, especially for writing draft copy for business purposes, while Lamarck remains a chatty generalist-reasoning model. + +I've felt confident that 14B Qwen finetunes and merges could break the 42.0 average, and Arcee **came close** with https://huggingface.co/arcee-ai/Virtuoso-Small-2. Congratulations to @arcee-ai! + +Just two months ago, it was easy to think that 14B had plateaued, that you could have high IFEVAL or high MUSR/MATH/GPQA at 14B, but not both. That barrier is completely shattered. I see a pathway to even better, and Virtuoso Small 2 is a big part of why. Very impressive work. This community would expect no less from Arcee. + +Just look at this graph! Keep in mind, my merges here build on the first Virtuoso Small, and *-DS merges build on DeepSeek R1. 
There are some impressive merges in the pipe!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/665fef5a4794222f6a2fe605/ztOb7sTh3u3VA-tvPfo1j.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'sthenno', 'Balda050', 'mlabonne', 'cnmoro', 'Crystalcareai', 'splendor1811', 'Dorjzodovsuren', 'stodavs', 'DeathGodlike', 'ThijsL202', 'dantezxcd'], 'count': 12}, {'reaction': '👍', 'users': ['Reithan', 'sthenno', 'Crystalcareai'], 'count': 3}]",2025-02-02 01:17:13,2025-02-03 22:39:13.837,"[{'_id': '66f889e35144a8d0c68b8078', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66f889e35144a8d0c68b8078/_aVcjIFcD6VD1N4mqog65.jpeg', 'fullname': 'Sthenno', 'name': 'sthenno', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 30, 'isFollowing': False}, {'_id': '665fef5a4794222f6a2fe605', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/665fef5a4794222f6a2fe605/sUF9NsMRNxdiEYKJbIbCk.jpeg', 'fullname': 'sometimesanotion', 'name': 'sometimesanotion', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 103, 'isFollowing': False}, {'_id': '658cfefb63d9c84928e94ad8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/658cfefb63d9c84928e94ad8/Wi_EsL_33XCCTe5LUVwDo.jpeg', 'fullname': 'Lucas Atkins', 'name': 'Crystalcareai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 171, 'isFollowing': False}]",/posts/sometimesanotion/170152327285681,3361,"{'language': 'en', 'probability': 0.9381181597709656}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,620793814308457,"[{'type': 'text', 'value': 'o3-mini is slightly better than R1, but lags behind Claude. Sorry folks, no new SOTA 😕', 'raw': 'o3-mini is slightly better than R1, but lags behind Claude. Sorry folks, no new SOTA 😕'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But OAI definitely owns the fashion of API. temperature and top_p are history now, reasoning_effort will be copied by other vendors.', 'raw': 'But OAI definitely owns the fashion of API. temperature and top_p are history now, reasoning_effort will be copied by other vendors.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'onekq-ai/WebApp1K-models-leaderboard'}, 'url': 'https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard', 'raw': 'https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard'}]","o3-mini is slightly better than R1, but lags behind Claude. Sorry folks, no new SOTA 😕 + +But OAI definitely owns the fashion of API. temperature and top_p are history now, reasoning_effort will be copied by other vendors. 
+ +https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard",[],[],"[{'reaction': '👀', 'users': ['LPX55', 'John6666', 'victor', 'dantezxcd'], 'count': 4}]",2025-02-01 20:40:16,2025-02-03 23:30:29.945,"[{'_id': '639daf827270667011153fbc', 'avatarUrl': '/avatars/0af0894de0744ad83d491e8b59aa65ef.svg', 'fullname': 'Han Yoon', 'name': 'LPX55', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}, {'_id': '669dbd709a4bf63e08f1ddc2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png', 'fullname': 'Yi Cui', 'name': 'onekq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}, {'_id': '63d257ae5c52bbd72caad0ee', 'avatarUrl': '/avatars/f86902d43f2f90f2eb806363b4bb7831.svg', 'fullname': 'Dennis Bekkering', 'name': 'DB2323', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/onekq/620793814308457,1691,"{'language': 'en', 'probability': 0.8507285118103027}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg,3938.0,chansung park,chansung,759862376905006,"[{'type': 'text', 'value': 'A brief summary of the o3-mini ', 'raw': 'A brief summary of the o3-mini '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The OpenAI o3-mini model is a significant improvement over the o1-mini, reaching o1 performance levels. While generally good, its performance isn't universally better than previous models (o1, o1-prev.) or GPT-4o across all benchmarks. This means workflows should be re-evaluated with each model upgrade. "", 'raw': ""The OpenAI o3-mini model is a significant improvement over the o1-mini, reaching o1 performance levels. While generally good, its performance isn't universally better than previous models (o1, o1-prev.) or GPT-4o across all benchmarks. This means workflows should be re-evaluated with each model upgrade. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The o3-mini has ""low,"" ""medium,"" and ""high"" versions, with ""low"" being the base model used for benchmarking. It\'s speculated that the higher versions simply involve more processing. A fair comparison with other models like Gemini 2.0 Thinking or DeepSeek-R1 would likely need to use the ""low"" version and a similar ""think more"" mechanism. ', 'raw': 'The o3-mini has ""low,"" ""medium,"" and ""high"" versions, with ""low"" being the base model used for benchmarking. It\'s speculated that the higher versions simply involve more processing. A fair comparison with other models like Gemini 2.0 Thinking or DeepSeek-R1 would likely need to use the ""low"" version and a similar ""think more"" mechanism. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' The system card is recommended reading due to its comprehensive benchmark data.', 'raw': ' The system card is recommended reading due to its comprehensive benchmark data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://openai.com/index/openai-o3-mini/', 'raw': 'https://openai.com/index/openai-o3-mini/'}]","A brief summary of the o3-mini + +The OpenAI o3-mini model is a significant improvement over the o1-mini, reaching o1 performance levels. While generally good, its performance isn't universally better than previous models (o1, o1-prev.) or GPT-4o across all benchmarks. This means workflows should be re-evaluated with each model upgrade. + +The o3-mini has ""low,"" ""medium,"" and ""high"" versions, with ""low"" being the base model used for benchmarking. It's speculated that the higher versions simply involve more processing. A fair comparison with other models like Gemini 2.0 Thinking or DeepSeek-R1 would likely need to use the ""low"" version and a similar ""think more"" mechanism. + + The system card is recommended reading due to its comprehensive benchmark data. + +https://openai.com/index/openai-o3-mini/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60d3b57ad7b174177faabd6e/uFl88hKhqIRM7NMROy-60.png'}]",[],"[{'reaction': '👍', 'users': ['chansung', 'YaTharThShaRma999', 'nicolay-r', 'rohitchandra', 'John6666', 'bsaintmartin', 'victor', 'ThijsL202', 'KnutJaegersberg', 'Ukdah', 'XHasnain', 'ValentinVVV', 'dantezxcd'], 'count': 13}]",2025-02-01 19:28:36,2025-02-01 19:28:36.862,[],/posts/chansung/759862376905006,4444,"{'language': 'en', 'probability': 0.9358590841293335}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6776340dd3ceb4493fda0c6e/WqL1TrRewRD_6-7Y8j6S8.jpeg,44.0,Ruben Roy,rubenroy,542508832004692,"[{'type': 'text', 'value': '🎉 Fully released my newest models trained on my GammaCorpus dataset, Zurich 7B & 14B and Geneva 12B. Here is the model collections:', 'raw': '🎉 Fully released my newest models trained on my GammaCorpus dataset, Zurich 7B & 14B and Geneva 12B. 
Here are the model collections:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Zurich:', 'raw': 'Zurich:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'rubenroy/zurich-679b21284e207e2844bc025d'}, 'url': 'https://huggingface.co/collections/rubenroy/zurich-679b21284e207e2844bc025d', 'raw': 'https://huggingface.co/collections/rubenroy/zurich-679b21284e207e2844bc025d'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Geneva:', 'raw': 'Geneva:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/collections/rubenroy/geneva-679e33a55d1576319b0d9cd4', 'raw': 'https://huggingface.co/collections/rubenroy/geneva-679e33a55d1576319b0d9cd4'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you would like to test them, feel free to visit their spaces:', 'raw': 'If you would like to test them, feel free to visit their spaces:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'rubenroy/Geneva-12B'}, 'url': 'https://huggingface.co/spaces/rubenroy/Geneva-12B', 'raw': 'https://huggingface.co/spaces/rubenroy/Geneva-12B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'rubenroy/Zurich-14B'}, 'url': 'https://huggingface.co/spaces/rubenroy/Zurich-14B', 'raw': 'https://huggingface.co/spaces/rubenroy/Zurich-14B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'rubenroy/Zurich-7B'}, 'url': 'https://huggingface.co/spaces/rubenroy/Zurich-7B', 'raw': 'https://huggingface.co/spaces/rubenroy/Zurich-7B'}]","🎉 Fully released my newest models trained on my GammaCorpus dataset, Zurich 7B & 14B and Geneva 12B. 
Here are the model collections: + +Zurich: +https://huggingface.co/collections/rubenroy/zurich-679b21284e207e2844bc025d + +Geneva: +https://huggingface.co/collections/rubenroy/geneva-679e33a55d1576319b0d9cd4 + +If you would like to test them, feel free to visit their spaces: +https://huggingface.co/spaces/rubenroy/Geneva-12B +https://huggingface.co/spaces/rubenroy/Zurich-14B +https://huggingface.co/spaces/rubenroy/Zurich-7B",[],[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', '8P2P8', 'nicolay-r', 'Spestly', 'Pankaj8922', 'SugarDaemon', 'SrsVanity', 'TurtarBar', 'Nulled404', 'DeepBlueNacht', 'John6666', 'Nbardy', 'dantezxcd'], 'count': 13}, {'reaction': '🚀', 'users': ['Spestly', 'SugarDaemon', 'TurtarBar', 'John6666'], 'count': 4}]",2025-02-01 17:32:23,2025-02-01 17:32:23.865,[],/posts/rubenroy/542508832004692,2903,"{'language': 'en', 'probability': 0.7689626216888428}",0
/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg,46.0,Emin Temiz,etemiz,299776984655741,"[{'type': 'text', 'value': 'Another AHA moment', 'raw': 'Another AHA moment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/etemiz/aha-indicator', 'raw': 'https://huggingface.co/blog/etemiz/aha-indicator'}]","Another AHA moment + +https://huggingface.co/blog/etemiz/aha-indicator",[],[],"[{'reaction': '👀', 'users': ['John6666', 'KnutJaegersberg', 'dantezxcd'], 'count': 3}]",2025-02-01 17:15:12,2025-02-01 17:15:12.456,[],/posts/etemiz/299776984655741,1165,"{'language': 'en', 'probability': 0.4008864462375641}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/630920925a5c889aaedc7f33/w00N19M21l2FXe6ZasSYc.jpeg,15.0,Kristaller486,kristaller486,437053986647288,"[{'type': 'text', 'value': 'Nebo-T1-Russian', 'raw': 'Nebo-T1-Russian'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(Probably) the first ""longCoT"" dataset for the Russian language created via Deepseek-R1.', 'raw': '(Probably) the first ""longCoT"" dataset for the Russian language created via Deepseek-R1.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Prompts taken from the Sky-T1 dataset and translated via Llama3.3-70B.', 'raw': ' - Prompts taken from the Sky-T1 dataset and translated via Llama3.3-70B.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Answers and reasoning generated by Deepseek-R1 (685B).', 'raw': ' - Answers and reasoning generated by Deepseek-R1 (685B).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - 16.4K samples in total, ≈12.4K Russian-only (in the rest, either the answer or reasoning is in English).', 'raw': ' - 16.4K samples in total, ≈12.4K Russian-only (in the rest, either the answer or reasoning is in English).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Languages in the answers and reasoning are labeled using fasttext.', 'raw': ' - Languages in the answers and reasoning are labeled using fasttext.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'kristaller486/Nebo-T1-Russian'}, 'url': 'https://huggingface.co/datasets/kristaller486/Nebo-T1-Russian', 'raw': 'https://huggingface.co/datasets/kristaller486/Nebo-T1-Russian'}]","Nebo-T1-Russian
+
+(Probably) the first ""longCoT"" dataset for the Russian language created via Deepseek-R1.
+
+ - Prompts taken from the Sky-T1 dataset and translated via Llama3.3-70B.
+ - Answers and reasoning generated by Deepseek-R1 (685B).
+ - 16.4K samples in total, ≈12.4K Russian-only (in the rest, either the answer or reasoning is in English).
+ - Languages in the answers and reasoning are labeled using fasttext.
+
+https://huggingface.co/datasets/kristaller486/Nebo-T1-Russian",[],[],"[{'reaction': '🚀', 'users': ['nicolay-r', 'John6666', 'AlexPoto', 'd0rj', 'dantezxcd'], 'count': 5}, {'reaction': '🔥', 'users': ['JLouisBiz', 'dantezxcd'], 'count': 2}]",2025-02-01 15:58:22,2025-02-01 16:45:24.212,[],/posts/kristaller486/437053986647288,1535,"{'language': 'en', 'probability': 0.8039143085479736}",0
+https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,552429541606995,"[{'type': 'text', 'value': '📢 The LLaMA-3.1-8B distilled 8B version of the R1 DeepSeek AI is available besides the one based on Qwen', 'raw': '📢 The LLaMA-3.1-8B distilled 8B version of the R1 DeepSeek AI is available besides the one based on Qwen'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📙 Notebook for using it in reasoning over series of data 🧠 :', 'raw': '📙 Notebook for using it in reasoning over series of data 🧠 :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/tutorials/llm_deep_seek_7b_distill_llama3.ipynb', 'raw': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/tutorials/llm_deep_seek_7b_distill_llama3.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Loading using the pipeline API of the transformers library:', 'raw': 'Loading using the pipeline API of the transformers library:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/llm/transformers_llama.py', 'raw': 'https://github.com/nicolay-r/nlp-thirdgate/blob/master/llm/transformers_llama.py'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🟡 GPU Usage: 12.3 GB (FP16/FP32 mode) which is suitable for T4. (a 1.5 GB less than Qwen-distilled version)', 'raw': '🟡 GPU Usage: 12.3 GB (FP16/FP32 mode) which is suitable for T4. (a 1.5 GB less than Qwen-distilled version)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐌 Performance: T4 instance: ~0.19 tokens/sec (FP32 mode) and (FP16 mode) ~0.22-0.30 tokens/sec. Should it be that slow? 
🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model name: ', 'raw': 'Model name: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'deepseek-ai/DeepSeek-R1-Distill-Llama-8B'}, 'url': 'https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-8B', 'raw': 'https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-8B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⭐ Framework: ', 'raw': '⭐ Framework: '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/bulk-chain', 'raw': 'https://github.com/nicolay-r/bulk-chain'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌌 Notebooks and models hub: ', 'raw': '🌌 Notebooks and models hub: '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/nlp-thirdgate', 'raw': 'https://github.com/nicolay-r/nlp-thirdgate'}]","📢 The LLaMA-3.1-8B distilled 8B version of the R1 DeepSeek AI is available besides the one based on Qwen

📙 Notebook for using it in reasoning over series of data 🧠 :
https://github.com/nicolay-r/nlp-thirdgate/blob/master/tutorials/llm_deep_seek_7b_distill_llama3.ipynb

Loading using the pipeline API of the transformers library:
https://github.com/nicolay-r/nlp-thirdgate/blob/master/llm/transformers_llama.py
🟡 GPU Usage: 12.3 GB (FP16/FP32 mode) which is suitable for T4. (a 1.5 GB less than Qwen-distilled version)
🐌 Performance: T4 instance: ~0.19 tokens/sec (FP32 mode) and (FP16 mode) ~0.22-0.30 tokens/sec. Should it be that slow? 🤔
Model name: https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Llama-8B
⭐ Framework: https://github.com/nicolay-r/bulk-chain
🌌 Notebooks and models hub: https://github.com/nicolay-r/nlp-thirdgate","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/-dxpcidgmF3Xe8_ZYE4o9.png'}]",[],"[{'reaction': '🔥', 'users': ['salmankhanpm', 'John6666', 'mkurman', 'JosakaX', 'rahim-xelpmoc', 'NickyNicky'], 'count': 6}]",2025-02-01 14:06:36,2025-02-01 14:19:09.926,[],/posts/nicolay-r/552429541606995,1626,"{'language': 'en', 'probability': 0.7785113453865051}",0
https://cdn-avatars.huggingface.co/v1/production/uploads/665f02b35dbb28742489d3b1/JVK7VKRWI6wJwX2uKKV3F.png,70.0,Eric Chung,DawnC,715692568822821,"[{'type': 'text', 'value': '🌟 PawMatchAI: Making Breed Selection More Intuitive! 🐕', 'raw': '🌟 PawMatchAI: Making Breed Selection More Intuitive! 🐕'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Excited to share the latest breakthrough in my AI-powered companion for finding your perfect furry friend! I've made significant improvements in breed recognition through innovative learning techniques! "", 'raw': ""Excited to share the latest breakthrough in my AI-powered companion for finding your perfect furry friend! I've made significant improvements in breed recognition through innovative learning techniques! 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""✨ What's New?"", 'raw': ""✨ What's New?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Major Recognition Enhancement:', 'raw': '🎯 Major Recognition Enhancement:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Implemented ICARL with advanced knowledge distillation, inspired by human learning processes', 'raw': '- Implemented ICARL with advanced knowledge distillation, inspired by human learning processes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dramatically improved recognition of challenging breeds like Havanese', 'raw': '- Dramatically improved recognition of challenging breeds like Havanese'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Created an intelligent learning system that mimics how expert teachers adapt their teaching style', 'raw': '- Created an intelligent learning system that mimics how expert teachers adapt their teaching style'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Added smart feature protection to maintain recognition accuracy across all breeds', 'raw': '- Added smart feature protection to maintain recognition accuracy across all breeds'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔬 Technical Innovations:', 'raw': '🔬 Technical Innovations:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Enhanced breed recognition through advanced morphological feature analysis', 'raw': '- Enhanced breed recognition through advanced morphological feature analysis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Implemented sophisticated feature extraction system for body proportions, head features, tail structure, fur texture, and color patterns', 'raw': '- Implemented sophisticated feature extraction system for body proportions, head features, tail structure, fur texture, and color patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Added intelligent attention mechanism for dynamic feature focus', 'raw': '- Added intelligent attention mechanism for dynamic feature focus'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Improved multi-dog detection with enhanced spatial analysis', 'raw': '- Improved multi-dog detection with enhanced spatial analysis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Key Features:', 'raw': '🎯 Key Features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Smart breed recognition powered by biomimetic AI architecture', 'raw': '- Smart breed recognition powered by biomimetic AI architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Visual matching scores with intuitive color indicators', 'raw': '- Visual matching scores with intuitive color indicators'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Detailed breed comparisons with interactive tooltips', 'raw': '- Detailed breed comparisons with interactive tooltips'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Lifestyle-based recommendations tailored to your needs', 'raw': '- Lifestyle-based recommendations tailored to your needs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💭 Project Vision', 'raw': '💭 Project Vision'}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'text', 'value': 'Taking inspiration from both AI technology and natural learning processes, this project continues to evolve in making breed selection more accessible while pushing the boundaries of AI capabilities.', 'raw': 'Taking inspiration from both AI technology and natural learning processes, this project continues to evolve in making breed selection more accessible while pushing the boundaries of AI capabilities.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Try it now: ', 'raw': '👉 Try it now: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DawnC/PawMatchAI'}, 'url': 'https://huggingface.co/spaces/DawnC/PawMatchAI', 'raw': 'https://huggingface.co/spaces/DawnC/PawMatchAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Your likes ❤️ fuel the continuous improvement of this project!', 'raw': 'Your likes ❤️ fuel the continuous improvement of this project!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #MachineLearning #DeepLearning #Pytorch #ComputerVision #TechForLife #ICARL #KnowledgeDistillation', 'raw': '#AI #MachineLearning #DeepLearning #Pytorch #ComputerVision #TechForLife #ICARL #KnowledgeDistillation'}]","🌟 PawMatchAI: Making Breed Selection More Intuitive! 🐕 + +Excited to share the latest breakthrough in my AI-powered companion for finding your perfect furry friend! I've made significant improvements in breed recognition through innovative learning techniques! + +✨ What's New? + +🎯 Major Recognition Enhancement: +- Implemented ICARL with advanced knowledge distillation, inspired by human learning processes +- Dramatically improved recognition of challenging breeds like Havanese +- Created an intelligent learning system that mimics how expert teachers adapt their teaching style +- Added smart feature protection to maintain recognition accuracy across all breeds + +🔬 Technical Innovations: +- Enhanced breed recognition through advanced morphological feature analysis +- Implemented sophisticated feature extraction system for body proportions, head features, tail structure, fur texture, and color patterns +- Added intelligent attention mechanism for dynamic feature focus +- Improved multi-dog detection with enhanced spatial analysis + +🎯 Key Features: +- Smart breed recognition powered by biomimetic AI architecture +- Visual matching scores with intuitive color indicators +- Detailed breed comparisons with interactive tooltips +- Lifestyle-based recommendations tailored to your needs + +💭 Project Vision +Taking inspiration from both AI technology and natural learning processes, this project continues to evolve in making breed selection more accessible while pushing the boundaries of AI capabilities. + +👉 Try it now: https://huggingface.co/spaces/DawnC/PawMatchAI + +Your likes ❤️ fuel the continuous improvement of this project! 
+ +#AI #MachineLearning #DeepLearning #Pytorch #ComputerVision #TechForLife #ICARL #KnowledgeDistillation",[],[],"[{'reaction': '❤️', 'users': ['John6666', 'joseph-bou'], 'count': 2}, {'reaction': '🤗', 'users': ['fuzzy-mittenz'], 'count': 1}]",2025-02-01 13:10:37,2025-02-08 02:07:40.683,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '665f02b35dbb28742489d3b1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/665f02b35dbb28742489d3b1/JVK7VKRWI6wJwX2uKKV3F.png', 'fullname': 'Eric Chung', 'name': 'DawnC', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 70, 'isFollowing': False}]",/posts/DawnC/715692568822821,1277,"{'language': 'en', 'probability': 0.8679927587509155}",10 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,919224792916963,"[{'type': 'text', 'value': 'Excited to share groundbreaking research from @Baidu_Inc on enterprise information search! The team has developed EICopilot, a revolutionary agent-based solution that transforms how we explore enterprise data in large-scale knowledge graphs.', 'raw': 'Excited to share groundbreaking research from @Baidu_Inc on enterprise information search! The team has developed EICopilot, a revolutionary agent-based solution that transforms how we explore enterprise data in large-scale knowledge graphs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Technical Innovation', 'raw': '>> Technical Innovation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'EICopilot leverages Large Language Models to interpret natural language queries and automatically generates Gremlin scripts for enterprise data exploration. The system processes hundreds of millions of nodes and billions of edges in real-time, handling complex enterprise relationships with remarkable precision.', 'raw': 'EICopilot leverages Large Language Models to interpret natural language queries and automatically generates Gremlin scripts for enterprise data exploration. 
The system processes hundreds of millions of nodes and billions of edges in real-time, handling complex enterprise relationships with remarkable precision.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Technical Components:', 'raw': 'Key Technical Components:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Advanced data pre-processing pipeline that builds vector databases of representative queries', 'raw': '- Advanced data pre-processing pipeline that builds vector databases of representative queries'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Novel query masking strategy that significantly improves intent recognition', 'raw': '- Novel query masking strategy that significantly improves intent recognition'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Comprehensive reasoning pipeline combining Chain-of-Thought with In-context learning', 'raw': '- Comprehensive reasoning pipeline combining Chain-of-Thought with In-context learning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Named Entity Recognition and Natural Language Processing Customization for precise entity matching', 'raw': '- Named Entity Recognition and Natural Language Processing Customization for precise entity matching'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Schema Linking Module for efficient graph database query generation', 'raw': '- Schema Linking Module for efficient graph database query generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Performance Metrics', 'raw': '>> Performance Metrics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The results are impressive - EICopilot achieves a syntax error rate as low as 10% and execution correctness up to 82.14%. The system handles 5000+ daily active users, demonstrating its robustness in real-world applications.', 'raw': 'The results are impressive - EICopilot achieves a syntax error rate as low as 10% and execution correctness up to 82.14%. The system handles 5000+ daily active users, demonstrating its robustness in real-world applications.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Implementation Details', 'raw': '>> Implementation Details'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The system uses Apache TinkerPop for graph database construction and employs sophisticated disambiguation processes, including anaphora resolution and entity retrieval. The architecture includes both offline and online phases, with continuous learning from user interactions to improve query accuracy.', 'raw': 'The system uses Apache TinkerPop for graph database construction and employs sophisticated disambiguation processes, including anaphora resolution and entity retrieval. 
The architecture includes both offline and online phases, with continuous learning from user interactions to improve query accuracy.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kudos to the research team from Baidu Inc., South China University of Technology, and other collaborating institutions for this significant advancement in enterprise information retrieval technology.', 'raw': 'Kudos to the research team from Baidu Inc., South China University of Technology, and other collaborating institutions for this significant advancement in enterprise information retrieval technology.'}]","Excited to share groundbreaking research from @Baidu_Inc on enterprise information search! The team has developed EICopilot, a revolutionary agent-based solution that transforms how we explore enterprise data in large-scale knowledge graphs. + +>> Technical Innovation +EICopilot leverages Large Language Models to interpret natural language queries and automatically generates Gremlin scripts for enterprise data exploration. The system processes hundreds of millions of nodes and billions of edges in real-time, handling complex enterprise relationships with remarkable precision. + +Key Technical Components: +- Advanced data pre-processing pipeline that builds vector databases of representative queries +- Novel query masking strategy that significantly improves intent recognition +- Comprehensive reasoning pipeline combining Chain-of-Thought with In-context learning +- Named Entity Recognition and Natural Language Processing Customization for precise entity matching +- Schema Linking Module for efficient graph database query generation + +>> Performance Metrics +The results are impressive - EICopilot achieves a syntax error rate as low as 10% and execution correctness up to 82.14%. The system handles 5000+ daily active users, demonstrating its robustness in real-world applications. + +>> Implementation Details +The system uses Apache TinkerPop for graph database construction and employs sophisticated disambiguation processes, including anaphora resolution and entity retrieval. The architecture includes both offline and online phases, with continuous learning from user interactions to improve query accuracy. 
+ +Kudos to the research team from Baidu Inc., South China University of Technology, and other collaborating institutions for this significant advancement in enterprise information retrieval technology.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/JxwSS7CwFD81vM8RoSIyo.jpeg'}]",[],"[{'reaction': '🤗', 'users': ['Shanvaj', 'John6666', 'vcosn', 'mediiiiii3', 'KingNish'], 'count': 5}, {'reaction': '👍', 'users': ['AndreyIrk', 'alt2023', 'mediiiiii3', 'DataSoul', 'umair894'], 'count': 5}]",2025-02-01 05:23:58,2025-02-01 13:01:52.045,"[{'_id': '679e171c36b51abda2a5d61c', 'avatarUrl': '/avatars/b462ad655408dccefed366236a58787c.svg', 'fullname': 'ALI HASHIM', 'name': 'Alhashim-01', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/singhsidhukuldeep/919224792916963,2222,"{'language': 'en', 'probability': 0.8388857841491699}",1 +/avatars/937a64aea8fde2f41a065f052b39f409.svg,48.0,alkinun,AtAndDev,637239470712027,"[{'type': 'text', 'value': 'everywhere i go i see his face', 'raw': 'everywhere i go i see his face'}]",everywhere i go i see his face,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/630f3e4002ce39336c411048/pWVgmy5I7GAxs0TMN0juW.png'}]",[],"[{'reaction': '🔥', 'users': ['Ma121', 'John6666', 'zkdesigns', 'Vaibhav212303', 'Oladele0905444', 'dillfrescott', 'actualbrain', 'littlecowmoo', 'mukeshBoss'], 'count': 9}]",2025-01-29 09:59:30,2025-01-29 09:59:30.344,[],/posts/AtAndDev/637239470712027,1932,"{'language': 'en', 'probability': 0.9918233752250671}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,922447741350865,"[{'type': 'text', 'value': 'Groundbreaking Research Alert: Can Large Language Models Really Understand Personal Preferences?', 'raw': 'Groundbreaking Research Alert: Can Large Language Models Really Understand Personal Preferences?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""A fascinating new study from researchers at University of Notre Dame, Xi'an Jiaotong University, and Université de Montréal introduces PERRECBENCH - a novel benchmark for evaluating how well Large Language Models (LLMs) understand user preferences in recommendation systems."", 'raw': ""A fascinating new study from researchers at University of Notre Dame, Xi'an Jiaotong University, and Université de Montréal introduces PERRECBENCH - a novel benchmark for evaluating how well Large Language Models (LLMs) understand user preferences in recommendation systems.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Technical Insights:', 'raw': 'Key Technical Insights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The benchmark eliminates user rating bias and item quality factors by using relative ratings and grouped ranking approaches', 'raw': '- The benchmark eliminates user rating bias and item quality factors by using relative ratings and grouped ranking approaches'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Implements three distinct ranking methods: pointwise rating prediction, pairwise comparison, and listwise ranking', 'raw': '- Implements three distinct ranking methods: pointwise rating prediction, pairwise comparison, and listwise ranking'}, {'type': 'new_line', 'raw': '\n'}, 
{'type': 'text', 'value': '- Evaluates 19 state-of-the-art LLMs including Claude-3.5, GPT-4, Llama-3, Mistral, and Qwen models', 'raw': '- Evaluates 19 state-of-the-art LLMs including Claude-3.5, GPT-4, Llama-3, Mistral, and Qwen models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Uses Kendall's tau correlation to measure ranking accuracy"", 'raw': ""- Uses Kendall's tau correlation to measure ranking accuracy""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Incorporates BM25 retriever with configurable history items (k=4 by default)', 'raw': '- Incorporates BM25 retriever with configurable history items (k=4 by default)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Notable Findings:', 'raw': 'Notable Findings:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Current LLMs struggle with true personalization, achieving only moderate correlation scores', 'raw': '- Current LLMs struggle with true personalization, achieving only moderate correlation scores'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Larger models don't always perform better - challenging conventional scaling laws"", 'raw': ""- Larger models don't always perform better - challenging conventional scaling laws""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Pairwise and listwise ranking methods outperform pointwise approaches', 'raw': '- Pairwise and listwise ranking methods outperform pointwise approaches'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Open-source models like Mistral-123B and Llama-3-405B compete well with proprietary models', 'raw': '- Open-source models like Mistral-123B and Llama-3-405B compete well with proprietary models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Weight merging strategy shows promise for improving personalization capabilities', 'raw': '- Weight merging strategy shows promise for improving personalization capabilities'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The research reveals that while LLMs excel at many tasks, they still face significant challenges in understanding individual user preferences. This work opens new avenues for improving personalized recommendation systems and highlights the importance of developing better evaluation methods.', 'raw': 'The research reveals that while LLMs excel at many tasks, they still face significant challenges in understanding individual user preferences. This work opens new avenues for improving personalized recommendation systems and highlights the importance of developing better evaluation methods.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A must-read for anyone interested in LLMs, recommender systems, or personalization technology. The team has made their benchmark and code publicly available for further research.', 'raw': 'A must-read for anyone interested in LLMs, recommender systems, or personalization technology. The team has made their benchmark and code publicly available for further research.'}]","Groundbreaking Research Alert: Can Large Language Models Really Understand Personal Preferences? 
+ +A fascinating new study from researchers at University of Notre Dame, Xi'an Jiaotong University, and Université de Montréal introduces PERRECBENCH - a novel benchmark for evaluating how well Large Language Models (LLMs) understand user preferences in recommendation systems. + +Key Technical Insights: +- The benchmark eliminates user rating bias and item quality factors by using relative ratings and grouped ranking approaches +- Implements three distinct ranking methods: pointwise rating prediction, pairwise comparison, and listwise ranking +- Evaluates 19 state-of-the-art LLMs including Claude-3.5, GPT-4, Llama-3, Mistral, and Qwen models +- Uses Kendall's tau correlation to measure ranking accuracy +- Incorporates BM25 retriever with configurable history items (k=4 by default) + +Notable Findings: +- Current LLMs struggle with true personalization, achieving only moderate correlation scores +- Larger models don't always perform better - challenging conventional scaling laws +- Pairwise and listwise ranking methods outperform pointwise approaches +- Open-source models like Mistral-123B and Llama-3-405B compete well with proprietary models +- Weight merging strategy shows promise for improving personalization capabilities + +The research reveals that while LLMs excel at many tasks, they still face significant challenges in understanding individual user preferences. This work opens new avenues for improving personalized recommendation systems and highlights the importance of developing better evaluation methods. + +A must-read for anyone interested in LLMs, recommender systems, or personalization technology. The team has made their benchmark and code publicly available for further research.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/6oqNE4EGb7CdyG18NuPQh.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'Seyha620', 'JoniJo', 'mkurman'], 'count': 4}, {'reaction': '👍', 'users': ['zkdesigns', 'JoniJo', 'lachsokhour'], 'count': 3}, {'reaction': '🤝', 'users': ['JoniJOST'], 'count': 1}]",2025-01-29 09:05:52,2025-01-29 09:05:52.079,[],/posts/singhsidhukuldeep/922447741350865,1682,"{'language': 'en', 'probability': 0.8519598841667175}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,550178345613716,"[{'type': 'text', 'value': 'Deepswipe by', 'raw': 'Deepswipe by'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '. Deepseek🐬🗿', 'raw': '. Deepseek🐬🗿'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Everything is now in recovery. 📉📈', 'raw': 'Everything is now in recovery. 📉📈'}]","Deepswipe by +. +. +. +. Deepseek🐬🗿 + + + + + + +Everything is now in recovery. 
📉📈","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/3zbu3BM8qnwNj9ZSYVssV.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['lukkaji', 'John6666', 'Makar7', 'AtAndDev', 'christinjoseph', 'JoniJo', 'GaNichols', 'Mandeep3838', 'Ayanoshin', 'JingzeShi'], 'count': 10}, {'reaction': '😎', 'users': ['John6666', 'Makar7', 'AtAndDev', 'efecelik', 'sirmyrrh', 'Nymbo'], 'count': 6}, {'reaction': '🤝', 'users': ['JoniJOST', 'Niksharma1201'], 'count': 2}, {'reaction': '❤️', 'users': ['DanteA42'], 'count': 1}]",2025-01-29 08:43:47,2025-01-31 05:14:03.190,"[{'_id': '679a4ff48ac89ca9430d5416', 'avatarUrl': '/avatars/2c46ea8f5c5a8aa29c797dbdc3eae21e.svg', 'fullname': 'Jonathan Cenat', 'name': 'Jonathan1998', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957, 'isFollowing': False}, {'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '6593502ca2607099284523db', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6593502ca2607099284523db/svxu-iTvwsgmiYNgaFGIN.png', 'fullname': 'William J. Marshall', 'name': 'fuzzy-mittenz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 44, 'isFollowing': False}]",/posts/prithivMLmods/550178345613716,5192,"{'language': 'en', 'probability': 0.9180638194084167}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg,107.0,Sami Halawa,samihalawa,625815797541641,"[{'type': 'text', 'value': 'A U T O I N T E R P R E T E R✌️🔥', 'raw': 'A U T O I N T E R P R E T E R✌️🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Took me long to found out how to nicely make Open-Interpreter work smoothly with UI.', 'raw': 'Took me long to found out how to nicely make Open-Interpreter work smoothly with UI.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[OPEN SPACE](', 'raw': '[OPEN SPACE]('}, {'type': 'link', 'href': 'https://huggingface.co/spaces/luigi12345/AutoInterpreter', 'raw': 'https://huggingface.co/spaces/luigi12345/AutoInterpreter'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Run ANY script in your browser, download files, scrap emails, create images, debug files and recommit back… 😲❤️ ', 'raw': '✅ Run ANY script in your browser, download files, scrap emails, create images, debug files and recommit back… 😲❤️ '}]","A U T O I N T E R P R E T E R✌️🔥 +Took me long to found out how to nicely make Open-Interpreter work smoothly with UI. 
+[OPEN SPACE](https://huggingface.co/spaces/luigi12345/AutoInterpreter)
+✅ Run ANY script in your browser, download files, scrape emails, create images, debug files and recommit back… 😲❤️ ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d00458fff501149572827f/Rf3NgeNr25QI89gGehx2t.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d00458fff501149572827f/QQk0kL6EDBhR1MWc44Kfr.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65d00458fff501149572827f/0AjRmw1w8_jSABVxfBqWT.png'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'JoniJo', 'samihalawa'], 'count': 3}]",2025-01-29 01:29:10,2025-01-29 07:09:31.819,[],/posts/samihalawa/625815797541641,1494,"{'language': 'en', 'probability': 0.7328414916992188}",1
+https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,585087418338350,"[{'type': 'text', 'value': 'It’s not just a flood of model releases, papers are dropping just as fast 🚀', 'raw': 'It’s not just a flood of model releases, papers are dropping just as fast 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here are the 10 most upvoted papers from the Chinese community:', 'raw': 'Here are the 10 most upvoted papers from the Chinese community:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'zh-ai-community/2025-january-papers-679933cbf0f3ced11f5a168a'}, 'url': 'https://huggingface.co/collections/zh-ai-community/2025-january-papers-679933cbf0f3ced11f5a168a', 'raw': 'https://huggingface.co/collections/zh-ai-community/2025-january-papers-679933cbf0f3ced11f5a168a'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","It’s not just a flood of model releases, papers are dropping just as fast 🚀
+
+Here are the 10 most upvoted papers from the Chinese community:
+👉 https://huggingface.co/collections/zh-ai-community/2025-january-papers-679933cbf0f3ced11f5a168a
+
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/Dej3wxbahRr-6aHF5TrrP.png'}]",[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'John6666', 'DeathGodlike', 'JoniJo', 'BrigitteTousi'], 'count': 5}, {'reaction': '👍', 'users': ['MegaTronX', 'JoniJo', 'attashe'], 'count': 3}]",2025-01-28 20:35:11,2025-01-28 20:36:41.446,[],/posts/AdinaY/585087418338350,3208,"{'language': 'en', 'probability': 0.8868496417999268}",0
+https://cdn-avatars.huggingface.co/v1/production/uploads/1672164046414-624b4a964056e2a6914a05c5.png,2779.0,Dylan Ebert,dylanebert,454214177555870,"[{'type': 'text', 'value': 'I made a 1 minute video explaining the DeepSeek situation', 'raw': 'I made a 1 minute video explaining the DeepSeek situation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'R1: ', 'raw': 'R1: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'deepseek-ai/DeepSeek-R1'}, 'url': 'https://huggingface.co/deepseek-ai/DeepSeek-R1', 'raw': 'https://huggingface.co/deepseek-ai/DeepSeek-R1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Janus Pro: ', 'raw': 'Janus Pro: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'deepseek-ai/Janus-Pro-7B'}, 'url': 
'https://huggingface.co/spaces/deepseek-ai/Janus-Pro-7B', 'raw': 'https://huggingface.co/spaces/deepseek-ai/Janus-Pro-7B'}]","I made a 1 minute video explaining the DeepSeek situation + +R1: https://huggingface.co/deepseek-ai/DeepSeek-R1 +Janus Pro: https://huggingface.co/spaces/deepseek-ai/Janus-Pro-7B","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/624b4a964056e2a6914a05c5/o9aysNaFLDbRKv1o7Vi6h.mp4'}]",[],"[{'reaction': '🔥', 'users': ['blanchon', 'elijaaaaah', 'DamianBoborzi', 'John6666', 'Ba2han', 'ayushash1', 'JoniJo', 'Kyledude95', 'adriansanz', 'brandyDolly', 'Dragoy'], 'count': 11}, {'reaction': '😎', 'users': ['wjones3668', 'Coliflower', 'JoniJo', 'Dragoy'], 'count': 4}, {'reaction': '🤝', 'users': ['JoniJOST', 'Dragoy'], 'count': 2}, {'reaction': '❤️', 'users': ['DanteA42', 'Dragoy'], 'count': 2}]",2025-01-28 19:55:38,2025-01-29 02:42:37.710,"[{'_id': '6186ddf6a7717cb375090c01', 'avatarUrl': '/avatars/716b6a7d1094c8036b2a8a7b9063e8aa.svg', 'fullname': 'Julien BLANCHON', 'name': 'blanchon', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 129, 'isFollowing': False}, {'_id': '646e3060f813cfe153f1b376', 'avatarUrl': '/avatars/d39bb5fb865931b6d4e45778831d64e8.svg', 'fullname': 'Nielly', 'name': 'Nielly', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '63ebb38dd64e6436e2306480', 'avatarUrl': '/avatars/15fb7be2083499ea08d986ff66b3708e.svg', 'fullname': 'Jayan Kesavan', 'name': 'jayan12k', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/dylanebert/454214177555870,3357,"{'language': 'en', 'probability': 0.5684375762939453}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/65831342f9c5cda913df366a/h7MLYt--shRYQj4-q5XmR.jpeg,18.0,Lin Tan,lin-tan,229890633852903,"[{'type': 'text', 'value': ""Introducing Nova (ICLR’25), foundation models for binary/assembly code. We have also released fine-tuned models for binary code decompilation. Preprint: arxiv.org/pdf/2311.13721 This is our follow-up work on binary analysis after our CCS'24 distinguished paper ("", 'raw': ""Introducing Nova (ICLR’25), foundation models for binary/assembly code. We have also released fine-tuned models for binary code decompilation. Preprint: arxiv.org/pdf/2311.13721 This is our follow-up work on binary analysis after our CCS'24 distinguished paper (""}, {'type': 'link', 'href': 'https://www.linkedin.com/posts/lintan_resym-harnessing-llms-to-recover-variable-activity-7231749452154159105-sEgj', 'raw': 'https://www.linkedin.com/posts/lintan_resym-harnessing-llms-to-recover-variable-activity-7231749452154159105-sEgj'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Highlights:', 'raw': 'Highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Nova is built with hierarchical attention specially designed for binary and contrastive learning.', 'raw': '1. Nova is built with hierarchical attention specially designed for binary and contrastive learning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Nova is pre-trained on 3B binary and source code tokens.', 'raw': '2. 
Nova is pre-trained on 3B binary and source code tokens.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Models: ', 'raw': '3. Models: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'lt-asset/nova-6.7b'}, 'url': 'https://huggingface.co/lt-asset/nova-6.7b', 'raw': 'https://huggingface.co/lt-asset/nova-6.7b'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'lt-asset/nova-6.7b-bcr'}, 'url': 'https://huggingface.co/lt-asset/nova-6.7b-bcr', 'raw': 'https://huggingface.co/lt-asset/nova-6.7b-bcr'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Smaller 1.3B models ', 'raw': '4. Smaller 1.3B models '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'lt-asset/nova-1.3b'}, 'url': 'https://huggingface.co/lt-asset/nova-1.3b…', 'raw': 'https://huggingface.co/lt-asset/nova-1.3b…'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'lt-asset/nova-1.3b-bcr'}, 'url': 'https://huggingface.co/lt-asset/nova-1.3b-bcr', 'raw': 'https://huggingface.co/lt-asset/nova-1.3b-bcr'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Binaries are a form of code. Do not forget about binaries when you work on #LLM4Code.', 'raw': 'Binaries are a form of code. Do not forget about binaries when you work on #LLM4Code.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Why binaries and binary models? Binary code plays an irreplaceable role in crucial tasks, including vulnerability detection, malware detection, binary recovery, and legacy software maintenance.\xa0For example, when performing tasks such as identifying attacks and malware, security analysts often only have access to assembly, i.e., the human-readable representation of binary code, which is extremely difficult to understand.\xa0Thus, combined with the increasing sophistication of cybercrime that poses significant threats worldwide (e.g., cybercrime is predicted to cost the world $10.5 trillion annually by 2025 (Sausalito, 2020)), effective binary analysis techniques are in high demand.', 'raw': 'Why binaries and binary models? 
Binary code plays an irreplaceable role in crucial tasks, including vulnerability detection, malware detection, binary recovery, and legacy software maintenance.\xa0For example, when performing tasks such as identifying attacks and malware, security analysts often only have access to assembly, i.e., the human-readable representation of binary code, which is extremely difficult to understand.\xa0Thus, combined with the increasing sophistication of cybercrime that poses significant threats worldwide (e.g., cybercrime is predicted to cost the world $10.5 trillion annually by 2025 (Sausalito, 2020)), effective binary analysis techniques are in high demand.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#LLM4Code #LLM\xa0#BinaryAnalysis #Security', 'raw': '#LLM4Code #LLM\xa0#BinaryAnalysis #Security'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'jiang719', 'raw': '@jiang719'}, {'type': 'text', 'value': ' Chengxiao Wang, Kevin Liu, Xiangzhe Xu, Xiangyu Zhang, ', 'raw': ' Chengxiao Wang, Kevin Liu, Xiangzhe Xu, Xiangyu Zhang, '}, {'type': 'mention', 'user': 'pbabkin', 'raw': '@pbabkin'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","Introducing Nova (ICLR’25), foundation models for binary/assembly code. We have also released fine-tuned models for binary code decompilation. Preprint: arxiv.org/pdf/2311.13721 This is our follow-up work on binary analysis after our CCS'24 distinguished paper (https://www.linkedin.com/posts/lintan_resym-harnessing-llms-to-recover-variable-activity-7231749452154159105-sEgj) + +Highlights: +1. Nova is built with hierarchical attention specially designed for binary and contrastive learning. +2. Nova is pre-trained on 3B binary and source code tokens. +3. Models: https://huggingface.co/lt-asset/nova-6.7b https://huggingface.co/lt-asset/nova-6.7b-bcr +4. Smaller 1.3B models https://huggingface.co/lt-asset/nova-1.3b… https://huggingface.co/lt-asset/nova-1.3b-bcr + +Binaries are a form of code. Do not forget about binaries when you work on #LLM4Code. + +Why binaries and binary models? Binary code plays an irreplaceable role in crucial tasks, including vulnerability detection, malware detection, binary recovery, and legacy software maintenance. For example, when performing tasks such as identifying attacks and malware, security analysts often only have access to assembly, i.e., the human-readable representation of binary code, which is extremely difficult to understand. Thus, combined with the increasing sophistication of cybercrime that poses significant threats worldwide (e.g., cybercrime is predicted to cost the world $10.5 trillion annually by 2025 (Sausalito, 2020)), effective binary analysis techniques are in high demand. 
+ +#LLM4Code #LLM #BinaryAnalysis #Security + +@jiang719 Chengxiao Wang, Kevin Liu, Xiangzhe Xu, Xiangyu Zhang, @pbabkin +",[],"[{'_id': '629e4ca2f2bda18349b6d330', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/629e4ca2f2bda18349b6d330/gSnGTLm2ugpINECylbuuQ.jpeg', 'fullname': 'Nan Jiang', 'name': 'jiang719', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}, {'_id': '6398cb0e387aa1c3f5b997f4', 'avatarUrl': '/avatars/0a48af195d1f5a493e89b13558fe113d.svg', 'fullname': 'Petr Babkin', 'name': 'pbabkin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}]","[{'reaction': '🔥', 'users': ['lin-tan', 'jiang719', 'John6666', 'JoniJo', 'arshiaafshani', 'JohnRoger'], 'count': 6}]",2025-01-28 19:34:33,2025-01-28 19:34:33.804,[],/posts/lin-tan/229890633852903,1640,"{'language': 'en', 'probability': 0.829581618309021}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/648a374f00f7a3374ee64b99/YPwSOrronoozwHbJchPn3.jpeg,246.0,Caleb Fahlgren,cfahlgren1,288966678298547,"[{'type': 'text', 'value': ""If you haven't seen yet, we just released Inference Providers 🔀"", 'raw': ""If you haven't seen yet, we just released Inference Providers 🔀""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> 4 new serverless inference providers on the Hub 🤯', 'raw': '> 4 new serverless inference providers on the Hub 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Use your HF API key or personal key with all providers 🔑', 'raw': '> Use your HF API key or personal key with all providers 🔑'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Chat with Deepseek R1, V3, and more on HF Hub 🐋', 'raw': '> Chat with Deepseek R1, V3, and more on HF Hub 🐋'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> We support Sambanova, TogetherAI, Replicate, and Fal.ai 💪', 'raw': '> We support Sambanova, TogetherAI, Replicate, and Fal.ai 💪'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Best of all, we don't charge any markup on top of the provider \U0001faf0 Have you tried it out yet? HF Pro accounts get $2 of free usage for the provider inference."", 'raw': ""Best of all, we don't charge any markup on top of the provider \U0001faf0 Have you tried it out yet? HF Pro accounts get $2 of free usage for the provider inference.""}]","If you haven't seen yet, we just released Inference Providers 🔀 + +> 4 new serverless inference providers on the Hub 🤯 +> Use your HF API key or personal key with all providers 🔑 +> Chat with Deepseek R1, V3, and more on HF Hub 🐋 +> We support Sambanova, TogetherAI, Replicate, and Fal.ai 💪 + +Best of all, we don't charge any markup on top of the provider 🫰 Have you tried it out yet? 
HF Pro accounts get $2 of free usage for the provider inference.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/648a374f00f7a3374ee64b99/boR-xMGpfGp3tK_jzhJJy.png'}]",[],"[{'reaction': '❤️', 'users': ['BrigitteTousi', 'John6666', 'dhruv3006', 'JoniJo'], 'count': 4}, {'reaction': '🔥', 'users': ['John6666', 'JoniJo'], 'count': 2}, {'reaction': '🤝', 'users': ['JoniJOST', 'Ji-Xiang'], 'count': 2}]",2025-01-28 19:08:39,2025-01-28 19:08:39.762,[],/posts/cfahlgren1/288966678298547,2342,"{'language': 'en', 'probability': 0.842444896697998}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,278224424663452,"[{'type': 'text', 'value': '🚀 The open source community is unstoppable: 4M total downloads for DeepSeek models on Hugging Face, with 3.2M coming from the +600 models created by the community.', 'raw': '🚀 The open source community is unstoppable: 4M total downloads for DeepSeek models on Hugging Face, with 3.2M coming from the +600 models created by the community.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""That's 30% more than yesterday!"", 'raw': ""That's 30% more than yesterday!""}]","🚀 The open source community is unstoppable: 4M total downloads for DeepSeek models on Hugging Face, with 3.2M coming from the +600 models created by the community. + +That's 30% more than yesterday!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/lPz1BCRJwcVUphdXOzTDK.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'BrigitteTousi', 'mkurman', 'JoniJo', 'AjayT76'], 'count': 5}, {'reaction': '👍', 'users': ['sansh2356', 'BrigitteTousi', 'JoniJo', 'solwol'], 'count': 4}]",2025-01-28 17:56:01,2025-01-28 18:24:52.332,"[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957, 'isFollowing': False}]",/posts/fdaudens/278224424663452,1725,"{'language': 'en', 'probability': 0.9557908773422241}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg,3938.0,chansung park,chansung,153998475914701,"[{'type': 'text', 'value': ""Simple summary on DeepSeek AI's Janus-Pro: A fresh take on multimodal AI! "", 'raw': ""Simple summary on DeepSeek AI's Janus-Pro: A fresh take on multimodal AI! ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It builds on its predecessor, Janus, by tweaking the training methodology rather than the model architecture. The result? Improved performance in understanding and generating multimodal data.', 'raw': 'It builds on its predecessor, Janus, by tweaking the training methodology rather than the model architecture. The result? 
Improved performance in understanding and generating multimodal data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Janus-Pro uses a three-stage training strategy, similar to Janus, but with key modifications:', 'raw': 'Janus-Pro uses a three-stage training strategy, similar to Janus, but with key modifications:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✦ Stage 1 & 2: Focus on separate training for specific objectives, rather than mixing data.', 'raw': '✦ Stage 1 & 2: Focus on separate training for specific objectives, rather than mixing data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✦ Stage 3: Fine-tuning with a careful balance of multimodal data.', 'raw': '✦ Stage 3: Fine-tuning with a careful balance of multimodal data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benchmarks show Janus-Pro holds its own against specialized models like TokenFlow XL and MetaMorph, and other multimodal models like SD3 Medium and DALL-E 3.', 'raw': 'Benchmarks show Janus-Pro holds its own against specialized models like TokenFlow XL and MetaMorph, and other multimodal models like SD3 Medium and DALL-E 3.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The main limitation? Low image resolution (384x384). However, this seems like a strategic choice to focus on establishing a solid ""recipe"" for multimodal models. Future work will likely leverage this recipe and increased computing power to achieve higher resolutions.', 'raw': 'The main limitation? Low image resolution (384x384). However, this seems like a strategic choice to focus on establishing a solid ""recipe"" for multimodal models. Future work will likely leverage this recipe and increased computing power to achieve higher resolutions.'}]","Simple summary on DeepSeek AI's Janus-Pro: A fresh take on multimodal AI! + +It builds on its predecessor, Janus, by tweaking the training methodology rather than the model architecture. The result? Improved performance in understanding and generating multimodal data. + +Janus-Pro uses a three-stage training strategy, similar to Janus, but with key modifications: +✦ Stage 1 & 2: Focus on separate training for specific objectives, rather than mixing data. +✦ Stage 3: Fine-tuning with a careful balance of multimodal data. + +Benchmarks show Janus-Pro holds its own against specialized models like TokenFlow XL and MetaMorph, and other multimodal models like SD3 Medium and DALL-E 3. + +The main limitation? Low image resolution (384x384). However, this seems like a strategic choice to focus on establishing a solid ""recipe"" for multimodal models. 
Future work will likely leverage this recipe and increased computing power to achieve higher resolutions.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60d3b57ad7b174177faabd6e/pOqpj2tYPd_SzH-GYTrZ2.png'}]",[],"[{'reaction': '👍', 'users': ['chansung', 'John6666', 'BrigitteTousi', 'grafov', 'bojan2501', 'jhhaz', 'JoniJo', 'Kakou', 'dataplayer12'], 'count': 9}, {'reaction': '❤️', 'users': ['MohammedNaeem', 'JoniJo'], 'count': 2}]",2025-01-28 17:07:13,2025-01-28 17:07:13.812,[],/posts/chansung/153998475914701,2036,"{'language': 'en', 'probability': 0.912796676158905}",0 +/avatars/3dac1c2fca69b3886f087f58909f50fd.svg,212.0,llm,fantaxy,501779207496627,"[{'type': 'text', 'value': '📚 AI Graphic Novel Generator Suite 2025', 'raw': '📚 AI Graphic Novel Generator Suite 2025'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Four Unique Genre Experiences', 'raw': '🎯 Four Unique Genre Experiences'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗡️ Martial Arts Novel Generator', 'raw': '🗡️ Martial Arts Novel Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'fantaxy/novel-sorim-en'}, 'url': 'https://huggingface.co/spaces/fantaxy/novel-sorim-en', 'raw': 'https://huggingface.co/spaces/fantaxy/novel-sorim-en'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Epic wuxia storytelling with real-time combat art', 'raw': 'Epic wuxia storytelling with real-time combat art'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Traditional martial arts world visualization', 'raw': 'Traditional martial arts world visualization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dynamic qi techniques in motion', 'raw': 'Dynamic qi techniques in motion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Beautiful Eastern art style generation', 'raw': 'Beautiful Eastern art style generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💖 Romance Novel Generator', 'raw': '💖 Romance Novel Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'fantaxy/novel-romance-en'}, 'url': 'https://huggingface.co/spaces/fantaxy/novel-romance-en', 'raw': 'https://huggingface.co/spaces/fantaxy/novel-romance-en'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Contemporary romance with matching scenes', 'raw': 'Contemporary romance with matching scenes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Emotional moment captures in art', 'raw': 'Emotional moment captures in art'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Modern relationship visualization', 'raw': 'Modern relationship visualization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Real-time romantic illustrations', 'raw': 'Real-time romantic illustrations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐉 Fantasy Novel Generator', 'raw': '🐉 Fantasy Novel Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'fantaxy/novel-fantasy-en'}, 'url': 'https://huggingface.co/spaces/fantaxy/novel-fantasy-en', 'raw': 
'https://huggingface.co/spaces/fantaxy/novel-fantasy-en'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Rich fantasy worlds come alive', 'raw': 'Rich fantasy worlds come alive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Magical scenes in stunning detail', 'raw': 'Magical scenes in stunning detail'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Epic quests visualized instantly', 'raw': 'Epic quests visualized instantly'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dynamic fantasy art generation', 'raw': 'Dynamic fantasy art generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔒 Adult Novel Generator', 'raw': '🔒 Adult Novel Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'fantaxy/novel-NSFW-en'}, 'url': 'https://huggingface.co/spaces/fantaxy/novel-NSFW-en', 'raw': 'https://huggingface.co/spaces/fantaxy/novel-NSFW-en'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mature content with tasteful art (18+)', 'raw': 'Mature content with tasteful art (18+)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Modern scene visualization', 'raw': 'Modern scene visualization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Character-focused illustrations', 'raw': 'Character-focused illustrations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sophisticated mood settings', 'raw': 'Sophisticated mood settings'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡ Core Features', 'raw': '⚡ Core Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7000+ token story generation', 'raw': '7000+ token story generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Real-time text-to-art creation', 'raw': 'Real-time text-to-art creation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Auto scene illustration', 'raw': 'Auto scene illustration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Continuous story flow', 'raw': 'Continuous story flow'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dynamic image gallery', 'raw': 'Dynamic image gallery'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'HD quality (768x768)', 'raw': 'HD quality (768x768)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ Technical Highlights', 'raw': '🛠️ Technical Highlights'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Advanced Flux image generation', 'raw': 'Advanced Flux image generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Story-driven art creation', 'raw': 'Story-driven art creation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Genre-optimized visuals', 'raw': 'Genre-optimized visuals'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Seamless integration', 'raw': 'Seamless integration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instant visualization', 'raw': 'Instant visualization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AINovel #GraphicNovel 
#StoryGeneration #HuggingFace', 'raw': '#AINovel #GraphicNovel #StoryGeneration #HuggingFace'}]","📚 AI Graphic Novel Generator Suite 2025 + +🎯 Four Unique Genre Experiences + +🗡️ Martial Arts Novel Generator +https://huggingface.co/spaces/fantaxy/novel-sorim-en + +Epic wuxia storytelling with real-time combat art +Traditional martial arts world visualization +Dynamic qi techniques in motion +Beautiful Eastern art style generation + +💖 Romance Novel Generator +https://huggingface.co/spaces/fantaxy/novel-romance-en + +Contemporary romance with matching scenes +Emotional moment captures in art +Modern relationship visualization +Real-time romantic illustrations + +🐉 Fantasy Novel Generator +https://huggingface.co/spaces/fantaxy/novel-fantasy-en + +Rich fantasy worlds come alive +Magical scenes in stunning detail +Epic quests visualized instantly +Dynamic fantasy art generation + +🔒 Adult Novel Generator +https://huggingface.co/spaces/fantaxy/novel-NSFW-en + +Mature content with tasteful art (18+) +Modern scene visualization +Character-focused illustrations +Sophisticated mood settings + +⚡ Core Features + +7000+ token story generation +Real-time text-to-art creation +Auto scene illustration +Continuous story flow +Dynamic image gallery +HD quality (768x768) + +🛠️ Technical Highlights + +Advanced Flux image generation +Story-driven art creation +Genre-optimized visuals +Seamless integration +Instant visualization + +#AINovel #GraphicNovel #StoryGeneration #HuggingFace","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66333c7887ce9a8935ff5738/K2GHJGJdBv7rOn5kdT9b3.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66333c7887ce9a8935ff5738/pN8aWwXvaiBsdmga2hDGZ.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66333c7887ce9a8935ff5738/3oMEbNR4FvapNlhGpc8C_.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/66333c7887ce9a8935ff5738/Bt8v4NgLNvcI_YIFHjzLh.webp'}]",[],"[{'reaction': '🔥', 'users': ['mple33', 'newyorkcheese', 'adminnews', 'seawolf2357', 'aiqcamp', 'ginipick', 'fantaxy', 'fantos', 'gunship999', 'immunobiotech', 'kolaslab', 'cutechicken', 'aiqtech', 'truetekken', 'lscsetepfa', 'John6666', 'hugsfacelover', 'yoeldcd', 'milleumcorus', 'songstepcomer', 'stardust23', 'kowougadian', 'soundofmindexict', 'novermer6th', 'maseriumpark', 'andreavictory', 'persecterA', 'acvchoclate', 'outofammorack', 'prideandsoul', 'compresserj', 'ministophighgo', 'roland0822', 'travian777', 'quantummaverick', 'orbtailwaves23', 'echloverse99', 'rustydreampak', 'skyboundtrailer', 'starforge42x7', 'andrayolek', 'danieellll1997', 'balemirror', 'jetfly2007', 'williamwark', 'lastdolphin', 'truetechlean', 'divisionunsere', 'pigeontow', 'Nielly', 'Azazkhan8866', 'LERATO112', 'rupenderaps'], 'count': 53}, {'reaction': '🚀', 'users': ['fantaxy', 'adminnews', 'seawolf2357', 'aiqcamp', 'ginipick', 'fantos', 'gunship999', 'immunobiotech', 'kolaslab', 'cutechicken', 'aiqtech', 'truetekken', 'mple33', 'lscsetepfa', 'John6666', 'hugsfacelover', 'songstepcomer', 'milleumcorus', 'stardust23', 'soundofmindexict', 'travian777', 'quantummaverick', 'rustydreampak', 'skyboundtrailer', 'andrayolek', 'newyorkcheese', 'balemirror'], 'count': 27}, {'reaction': '👀', 'users': ['fantaxy', 'adminnews', 'seawolf2357', 'aiqcamp', 'ginipick', 'fantos', 'gunship999', 'kolaslab', 'cutechicken', 'aiqtech', 'truetekken', 'mple33', 
'lscsetepfa', 'hugsfacelover', 'songstepcomer', 'milleumcorus', 'quantummaverick', 'codenger2005', 'rustydreampak'], 'count': 19}, {'reaction': '❤️', 'users': ['fantaxy', 'adminnews', 'seawolf2357', 'aiqcamp', 'fantos', 'gunship999', 'kolaslab', 'aiqtech', 'truetekken', 'mple33', 'hugsfacelover', 'codenger2005', 'ginipick', 'Akseltinfat'], 'count': 14}, {'reaction': '😎', 'users': ['fantaxy', 'gunship999', 'kolaslab', 'aiqtech', 'mple33', 'hugsfacelover', 'aiqcamp', 'LERATO112', 'ginipick'], 'count': 9}, {'reaction': '🤗', 'users': ['fantaxy', 'adminnews', 'gunship999', 'kolaslab', 'aiqtech', 'mple33', 'hugsfacelover', 'aiqcamp', 'ginipick'], 'count': 9}, {'reaction': '➕', 'users': ['fantaxy', 'gunship999', 'kolaslab', 'cutechicken', 'truetekken', 'mple33', 'aiqcamp', 'ginipick'], 'count': 8}, {'reaction': '😔', 'users': ['fantaxy', 'gunship999', 'cutechicken', 'aiqtech', 'kolaslab', 'aiqcamp'], 'count': 6}, {'reaction': '🧠', 'users': ['fantaxy', 'gunship999', 'mple33', 'kolaslab', 'aiqcamp'], 'count': 5}, {'reaction': '👍', 'users': ['fantaxy', 'gunship999', 'kolaslab', 'aiqcamp', 'ginipick'], 'count': 5}, {'reaction': '🤝', 'users': ['fantaxy', 'gunship999', 'cutechicken', 'kolaslab', 'aiqcamp'], 'count': 5}, {'reaction': '🤯', 'users': ['fantaxy', 'gunship999', 'cutechicken', 'kolaslab', 'aiqcamp'], 'count': 5}]",2025-01-25 03:27:00,2025-05-10 13:49:30.307,"[{'_id': '67edde88b959778460441ad6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/sjdP4ckGGKNZd0WXVcuSp.png', 'fullname': 'El Arbi EL ADLOUNI', 'name': 'elarbiemail', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6321d1e7a418a789a237a0ba', 'avatarUrl': '/avatars/67273e4777d643c97e8970f81a023d43.svg', 'fullname': 'A2va', 'name': 'A2va', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/fantaxy/501779207496627,9044,"{'language': 'en', 'probability': 0.70051109790802}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,601938793771243,"[{'type': 'text', 'value': 'So 🐋DeepSeek🐋 hits the mainstream media. But it has been a star in our little cult for at least 6 months. Its meteoric success is not overnight, but two years in the making.', 'raw': 'So 🐋DeepSeek🐋 hits the mainstream media. But it has been a star in our little cult for at least 6 months. 
Its meteoric success is not overnight, but two years in the making.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To learn their history, just look at their 🤗 repo ', 'raw': 'To learn their history, just look at their 🤗 repo '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'deepseek-ai'}, 'url': 'https://huggingface.co/deepseek-ai', 'raw': 'https://huggingface.co/deepseek-ai', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538815d1bdb3c40db94fbfa/xMBly9PUMphrFVMxLX4kq.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* End of 2023, they launched the first model (pretrained by themselves) following Llama 2 architecture', 'raw': '* End of 2023, they launched the first model (pretrained by themselves) following Llama 2 architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* June 2024, v2 (MoE architecture) surpassed Gemini 1.5, but behind Mistral', 'raw': '* June 2024, v2 (MoE architecture) surpassed Gemini 1.5, but behind Mistral'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* September, v2.5 surpassed GPT 4o mini', 'raw': '* September, v2.5 surpassed GPT 4o mini'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* December, v3 surpassed GPT 4o', 'raw': '* December, v3 surpassed GPT 4o'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Now R1 surpassed o1', 'raw': '* Now R1 surpassed o1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Most importantly, if you think DeepSeek's success is singular and unrivaled, that's WRONG. The following models are also near or equal to the o1 bar."", 'raw': ""Most importantly, if you think DeepSeek's success is singular and unrivaled, that's WRONG. The following models are also near or equal to the o1 bar.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Minimax-01', 'raw': '* Minimax-01'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Kimi k1.5', 'raw': '* Kimi k1.5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Doubao 1.5 pro', 'raw': '* Doubao 1.5 pro'}]","So 🐋DeepSeek🐋 hits the mainstream media. But it has been a star in our little cult for at least 6 months. Its meteoric success is not overnight, but two years in the making.
+
+To learn their history, just look at their 🤗 repo https://huggingface.co/deepseek-ai
+
+* End of 2023, they launched the first model (pretrained by themselves) following Llama 2 architecture
+* June 2024, v2 (MoE architecture) surpassed Gemini 1.5, but behind Mistral
+* September, v2.5 surpassed GPT 4o mini
+* December, v3 surpassed GPT 4o
+* Now R1 surpassed o1
+
+Most importantly, if you think DeepSeek's success is singular and unrivaled, that's WRONG. The following models are also near or equal to the o1 bar. 
+ +* Minimax-01 +* Kimi k1.5 +* Doubao 1.5 pro",[],[],"[{'reaction': '👍', 'users': ['John6666', 'adityamahakali', 'yoeldcd', 'AtAndDev', 'nikhzx', 'YaTharThShaRma999', 'wangbinyq', 'Nielly', 'poterliu', 'Winnougan', 'DataSoul', 'ocramz', 'xi0v'], 'count': 13}, {'reaction': '🔥', 'users': ['YaTharThShaRma999', 'rksiitd', 'xi0v', 'djuna'], 'count': 4}]",2025-01-25 01:38:47,2025-01-28 19:28:41.721,"[{'_id': '669dbd709a4bf63e08f1ddc2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png', 'fullname': 'Yi Cui', 'name': 'onekq', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}]",/posts/onekq/601938793771243,2317,"{'language': 'en', 'probability': 0.9185019135475159}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png,1048.0,Hexgrad,hexgrad,393112447570516,"[{'type': 'text', 'value': 'IMHO, being able & willing to defeat CAPTCHA, hCaptcha, or any other reasoning puzzle is a must-have for any Web-Browsing / Computer-Using Agent (WB/CUA).', 'raw': 'IMHO, being able & willing to defeat CAPTCHA, hCaptcha, or any other reasoning puzzle is a must-have for any Web-Browsing / Computer-Using Agent (WB/CUA).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I realize it subverts the purpose of CAPTCHA, but I do not think you can claim to be building AGI/agents without smoothly passing humanity checks. It would be like getting in a self-driving car that requires human intervention over speed bumps. Claiming AGI or even ""somewhat powerful AI"" seems hollow if you are halted by a mere CAPTCHA.', 'raw': 'I realize it subverts the purpose of CAPTCHA, but I do not think you can claim to be building AGI/agents without smoothly passing humanity checks. It would be like getting in a self-driving car that requires human intervention over speed bumps. Claiming AGI or even ""somewhat powerful AI"" seems hollow if you are halted by a mere CAPTCHA.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I imagine OpenAI's Operator is *able* but *not willing* to defeat CAPTCHA. Like their non-profit status, I expect that policy to evolve over time—and if not, rival agent-builders will attack that opening to offer a better product."", 'raw': ""I imagine OpenAI's Operator is *able* but *not willing* to defeat CAPTCHA. Like their non-profit status, I expect that policy to evolve over time—and if not, rival agent-builders will attack that opening to offer a better product.""}]","IMHO, being able & willing to defeat CAPTCHA, hCaptcha, or any other reasoning puzzle is a must-have for any Web-Browsing / Computer-Using Agent (WB/CUA). + +I realize it subverts the purpose of CAPTCHA, but I do not think you can claim to be building AGI/agents without smoothly passing humanity checks. It would be like getting in a self-driving car that requires human intervention over speed bumps. Claiming AGI or even ""somewhat powerful AI"" seems hollow if you are halted by a mere CAPTCHA. + +I imagine OpenAI's Operator is *able* but *not willing* to defeat CAPTCHA. 
Like their non-profit status, I expect that policy to evolve over time—and if not, rival agent-builders will attack that opening to offer a better product.",[],[],"[{'reaction': '❤️', 'users': ['codeSeed', 'BrockMisner', 'John6666', 'nyuuzyou', 'Kerassy', 'unrealmarc', 'fireblade2534'], 'count': 7}, {'reaction': '➕', 'users': ['euphoria83'], 'count': 1}]",2025-01-24 21:04:28,2025-01-28 18:14:25.552,"[{'_id': '654314fe2997b38f4f9fa7b9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/654314fe2997b38f4f9fa7b9/OHSPXzsYCwFHd3fsep9f3.png', 'fullname': 'Christopher Chenoweth', 'name': 'ThreadAbort', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}]",/posts/hexgrad/393112447570516,4043,"{'language': 'en', 'probability': 0.9272972941398621}",2
+https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,813236614223109,"[{'type': 'text', 'value': ""Oof, what a week! 🥵 So many things have happened, let's recap! "", 'raw': ""Oof, what a week! 🥵 So many things have happened, let's recap! ""}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/jan-24-releases-6793d610774073328eac67a9'}, 'url': 'https://huggingface.co/collections/merve/jan-24-releases-6793d610774073328eac67a9', 'raw': 'https://huggingface.co/collections/merve/jan-24-releases-6793d610774073328eac67a9'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Multimodal 💬', 'raw': 'Multimodal 💬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- We have released SmolVLM -- tiniest VLMs that come in 256M and 500M, with its retrieval models ColSmol for multimodal RAG 💗"", 'raw': ""- We have released SmolVLM -- tiniest VLMs that come in 256M and 500M, with its retrieval models ColSmol for multimodal RAG 💗""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- UI-TARS are new models by ByteDance to unlock agentic GUI control 🤯 in 2B, 7B and 72B', 'raw': '- UI-TARS are new models by ByteDance to unlock agentic GUI control 🤯 in 2B, 7B and 72B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Alibaba DAMO lab released VideoLlama3, new video LMs that come in 2B and 7B', 'raw': '- Alibaba DAMO lab released VideoLlama3, new video LMs that come in 2B and 7B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MiniMaxAI released Minimax-VL-01, where the decoder is based on MiniMax-Text-01 456B MoE model with long context ', 'raw': '- MiniMaxAI released Minimax-VL-01, where the decoder is based on MiniMax-Text-01 456B MoE model with long context '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dataset: Yale released a new benchmark called MMVU', 'raw': '- Dataset: Yale released a new benchmark called MMVU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Dataset: CAIS released Humanity's Last Exam (HLE), a new challenging MM benchmark"", 'raw': ""- Dataset: CAIS released Humanity's Last Exam (HLE), a new challenging MM benchmark""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLMs 📖', 'raw': 'LLMs 📖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- DeepSeek-R1 & DeepSeek-R1-Zero: gigantic 660B reasoning models by DeepSeek, and six distilled dense models, on par with o1 with MIT license! 
🤯', 'raw': '- DeepSeek-R1 & DeepSeek-R1-Zero: gigantic 660B reasoning models by DeepSeek, and six distilled dense models, on par with o1 with MIT license! 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Qwen2.5-Math-PRM: new math models by Qwen in 7B and 72B', 'raw': '- Qwen2.5-Math-PRM: new math models by Qwen in 7B and 72B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- NVIDIA released AceMath and AceInstruct, a new family of models and their datasets (SFT and reward ones too!)', 'raw': '- NVIDIA released AceMath and AceInstruct, a new family of models and their datasets (SFT and reward ones too!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Audio 🗣️', 'raw': 'Audio 🗣️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Llasa is a new speech synthesis model based on Llama that comes in 1B,3B, and 8B', 'raw': '- Llasa is a new speech synthesis model based on Llama that comes in 1B,3B, and 8B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- TangoFlux is a new audio generation model trained from scratch and aligned with CRPO', 'raw': '- TangoFlux is a new audio generation model trained from scratch and aligned with CRPO'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Image/Video/3D Generation ⏯️', 'raw': 'Image/Video/3D Generation ⏯️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Flex.1-alpha is a new 8B pre-trained diffusion model by ostris similar to Flux', 'raw': '- Flex.1-alpha is a new 8B pre-trained diffusion model by ostris similar to Flux'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- tencent released Hunyuan3D-2, new 3D asset generation from images', 'raw': '- tencent released Hunyuan3D-2, new 3D asset generation from images'}]","Oof, what a week! 🥵 So many things have happened, let's recap! https://huggingface.co/collections/merve/jan-24-releases-6793d610774073328eac67a9
+
+Multimodal 💬
+- We have released SmolVLM -- tiniest VLMs that come in 256M and 500M, with its retrieval models ColSmol for multimodal RAG 💗
+- UI-TARS are new models by ByteDance to unlock agentic GUI control 🤯 in 2B, 7B and 72B
+- Alibaba DAMO lab released VideoLlama3, new video LMs that come in 2B and 7B
+- MiniMaxAI released Minimax-VL-01, where the decoder is based on MiniMax-Text-01 456B MoE model with long context 
+- Dataset: Yale released a new benchmark called MMVU
+- Dataset: CAIS released Humanity's Last Exam (HLE), a new challenging MM benchmark
+
+LLMs 📖
+- DeepSeek-R1 & DeepSeek-R1-Zero: gigantic 660B reasoning models by DeepSeek, and six distilled dense models, on par with o1 with MIT license! 🤯
+- Qwen2.5-Math-PRM: new math models by Qwen in 7B and 72B
+- NVIDIA released AceMath and AceInstruct, a new family of models and their datasets (SFT and reward ones too!) 
+ +Audio 🗣️ +- Llasa is a new speech synthesis model based on Llama that comes in 1B,3B, and 8B +- TangoFlux is a new audio generation model trained from scratch and aligned with CRPO + +Image/Video/3D Generation ⏯️ +- Flex.1-alpha is a new 8B pre-trained diffusion model by ostris similar to Flux +- tencent released Hunyuan3D-2, new 3D asset generation from images","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/EloJMIH7wS72U8hIe_nMw.png'}]",[],"[{'reaction': '🔥', 'users': ['jsulz', 'BrigitteTousi', 'atasoglu', 'alvarobartt', 'peetzweg', 'burtenshaw', 'clem', 'rwightman', 'Citaman', 'm-ric', 'xianbao', 'qgallouedec', 'calmodovar', 'John6666', 'boksiora', 'misha3453', 'kylewascher', 'junyeong-nero', 'Akhil-Theerthala', 'talaviyabhavik', 'lucid-gunner', '6cf', 'Paul-HF', 'ucsahin', 'jbilcke-hf', 'polats', 'Joewasgiven', 'nada-mah', 'b10ana'], 'count': 29}, {'reaction': '🤗', 'users': ['alvarobartt', 'burtenshaw', 'clem', 'John6666', 'boksiora', '6cf', 'Paul-HF', 'jbilcke-hf'], 'count': 8}, {'reaction': '👀', 'users': ['alvarobartt', 'burtenshaw', 'clem', '6cf'], 'count': 4}, {'reaction': '👍', 'users': ['Melike', 'wsuff', '6cf', 'pagezyhf'], 'count': 4}, {'reaction': '🧠', 'users': ['Tonic', '6cf'], 'count': 2}, {'reaction': '➕', 'users': ['tbkhori'], 'count': 1}]",2025-01-24 20:17:15,2025-01-27 13:38:31.938,"[{'_id': '63691c3eda9b693c2730b2a2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63691c3eda9b693c2730b2a2/hBtKpgo3_9003MWCGkw5d.png', 'fullname': 'Brigitte Tousignant', 'name': 'BrigitteTousi', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 256, 'isFollowing': False}, {'_id': '67489ef40aa1d7e51b667fc2', 'avatarUrl': '/avatars/143d5ed8ebd915067596ad1745cb50d3.svg', 'fullname': '', 'name': 'usama121', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '60f0608166e5701b80ed3f02', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60f0608166e5701b80ed3f02/BHso-wSWpR9b8b8CKvodC.jpeg', 'fullname': 'Alvaro Bartolome', 'name': 'alvarobartt', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1920, 'isFollowing': False}, {'_id': '631ce4b244503b72277fc89f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1677431596830-631ce4b244503b72277fc89f.jpeg', 'fullname': 'Quentin Gallouédec', 'name': 'qgallouedec', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 348, 'isFollowing': False}, {'_id': '62a3bb1cd0d8c2c2169f0b88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg', 'fullname': 'Joseph [open/acc] Pollack', 'name': 'Tonic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 415, 'isFollowing': False}, {'_id': '676c67da7bad1587f2d046e2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/676c67da7bad1587f2d046e2/Y-aJmRj9FB49n36X1qKo9.jpeg', 'fullname': 'Akhil Theerthala', 'name': 'Akhil-Theerthala', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12, 'isFollowing': False}]",/posts/merve/813236614223109,5445,"{'language': 'en', 'probability': 0.8884541988372803}",7 
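+As a quick illustration of the SmolVLM release mentioned in the recap above, here is a minimal, hedged sketch of querying the 256M checkpoint with transformers. The checkpoint name, placeholder image URL, and generation settings are assumptions for illustration, not details taken from the post itself.
+```python
+# Minimal sketch: prompting SmolVLM-256M with transformers (checkpoint name assumed).
+from transformers import AutoProcessor, AutoModelForVision2Seq
+from PIL import Image
+import requests
+
+model_id = "HuggingFaceTB/SmolVLM-256M-Instruct"  # assumed repo id for the 256M release
+processor = AutoProcessor.from_pretrained(model_id)
+model = AutoModelForVision2Seq.from_pretrained(model_id)
+
+# Any RGB image works; this URL is a placeholder.
+image = Image.open(requests.get("https://picsum.photos/512", stream=True).raw)
+messages = [{"role": "user", "content": [
+    {"type": "image"},
+    {"type": "text", "text": "Describe this image briefly."},
+]}]
+prompt = processor.apply_chat_template(messages, add_generation_prompt=True)
+inputs = processor(text=prompt, images=[image], return_tensors="pt")
+out = model.generate(**inputs, max_new_tokens=64)
+print(processor.batch_decode(out, skip_special_tokens=True)[0])
+```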
+https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,525743780885793,"[{'type': 'text', 'value': 'The 🐳 just crossed 10,000 followers on HF ', 'raw': 'The 🐳 just crossed 10,000 followers on HF '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'deepseek-ai'}, 'url': 'https://huggingface.co/deepseek-ai', 'raw': 'https://huggingface.co/deepseek-ai', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538815d1bdb3c40db94fbfa/xMBly9PUMphrFVMxLX4kq.png'}]","The 🐳 just crossed 10,000 followers on HF + +https://huggingface.co/deepseek-ai","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/K9FJ12IsjvxLlsTT-0T84.png'}]",[],"[{'reaction': '🔥', 'users': ['AdinaY', 'jsulz', 'onekq', 'bikdroid', 'John6666', 'Nielly', 'rksiitd', 'PilotSB', 'nicolay-r', 'DamarJati'], 'count': 10}, {'reaction': '🚀', 'users': ['jsulz', 'PilotSB', 'nicolay-r'], 'count': 3}, {'reaction': '❤️', 'users': ['rksiitd', 's-emanuilov'], 'count': 2}, {'reaction': '😎', 'users': ['PilotSB'], 'count': 1}]",2025-01-24 20:15:08,2025-01-24 20:15:08.623,[],/posts/clem/525743780885793,2474,"{'language': 'en', 'probability': 0.7840662002563477}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1672164046414-624b4a964056e2a6914a05c5.png,2779.0,Dylan Ebert,dylanebert,344742688614414,"[{'type': 'text', 'value': '⚙️ Convert ', 'raw': '⚙️ Convert '}, {'type': 'inline_code', 'code': '.ply', 'raw': '`.ply`'}, {'type': 'text', 'value': ' to ', 'raw': ' to '}, {'type': 'inline_code', 'code': '.splat', 'raw': '`.splat`'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""i've created a simple space to convert .ply gaussian splat files to .splat format"", 'raw': ""i've created a simple space to convert .ply gaussian splat files to .splat format""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'dylanebert/ply-to-splat'}, 'url': 'https://huggingface.co/spaces/dylanebert/ply-to-splat', 'raw': 'https://huggingface.co/spaces/dylanebert/ply-to-splat'}]","⚙️ Convert `.ply` to `.splat` + +i've created a simple space to convert .ply gaussian splat files to .splat format + +https://huggingface.co/spaces/dylanebert/ply-to-splat",[],[],"[{'reaction': '👍', 'users': ['John6666', 'DamianBoborzi'], 'count': 2}]",2025-01-24 19:50:37,2025-01-24 19:50:37.153,[],/posts/dylanebert/344742688614414,750,"{'language': 'en', 'probability': 0.4040106534957886}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,624313115582088,"[{'type': 'text', 'value': 'smolagents can see 🔥', 'raw': 'smolagents can see 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'we just shipped vision support to smolagents 🤗 agentic computers FTW', 'raw': 'we just shipped vision support to smolagents 🤗 agentic computers FTW'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'you can now:', 'raw': 'you can now:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻 let the agent get images dynamically (e.g. agentic web browser) ', 'raw': '💻 let the agent get images dynamically (e.g. 
agentic web browser) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📑 pass images at the init of the agent (e.g. chatting with documents, filling forms automatically etc)', 'raw': '📑 pass images at the init of the agent (e.g. chatting with documents, filling forms automatically etc)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'with a few LoC changed! 🤯', 'raw': 'with a few LoC changed! 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'you can use transformers models locally (like Qwen2VL) OR plug in your favorite multimodal inference provider (gpt-4o, anthropic & co) 🤠', 'raw': 'you can use transformers models locally (like Qwen2VL) OR plug in your favorite multimodal inference provider (gpt-4o, anthropic & co) 🤠'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'read our blog ', 'raw': 'read our blog '}, {'type': 'link', 'href': 'http://hf.co/blog/smolagents-can-see', 'raw': 'http://hf.co/blog/smolagents-can-see'}, {'type': 'new_line', 'raw': '\n'}]","smolagents can see 🔥
+we just shipped vision support to smolagents 🤗 agentic computers FTW
+
+you can now:
+💻 let the agent get images dynamically (e.g. agentic web browser) 
+📑 pass images at the init of the agent (e.g. chatting with documents, filling forms automatically etc)
+with a few LoC changed! 🤯
+you can use transformers models locally (like Qwen2VL) OR plug in your favorite multimodal inference provider (gpt-4o, anthropic & co) 🤠
+
+read our blog http://hf.co/blog/smolagents-can-see
+","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/eEjQGoo7bwf0zjmxP6bSb.mp4'}]",[],"[{'reaction': '🚀', 'users': ['Dref360', 'John6666', 'britny', 'sugatoray', 'atasoglu', 'KingNish', 'theainerd', 'sriramvasu'], 'count': 8}, {'reaction': '🤗', 'users': ['John6666'], 'count': 1}, {'reaction': '😎', 'users': ['John6666'], 'count': 1}, {'reaction': '👍', 'users': ['Akash3698'], 'count': 1}]",2025-01-24 18:03:45,2025-01-24 18:03:45.586,[],/posts/merve/624313115582088,2321,"{'language': 'en', 'probability': 0.7621671557426453}",0
+https://cdn-avatars.huggingface.co/v1/production/uploads/666b9ef5e6c60b6fc4156675/av6g3oEeQQ4Rpa_IBFfaT.jpeg,25.0,Samuel Lima Braz,samuellimabraz,453442787220285,"[{'type': 'text', 'value': 'I wrote an article on Parameter-Efficient Fine-Tuning (PEFT), exploring techniques for efficient fine-tuning in LLMs, their implementations, and variations. ', 'raw': 'I wrote an article on Parameter-Efficient Fine-Tuning (PEFT), exploring techniques for efficient fine-tuning in LLMs, their implementations, and variations. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The study is based on the article ""Scaling Down to Scale Up: A Guide to Parameter-Efficient Fine-Tuning"" and the PEFT library integrated with Hugging Face\'s Transformers. ', 'raw': 'The study is based on the article ""Scaling Down to Scale Up: A Guide to Parameter-Efficient Fine-Tuning"" and the PEFT library integrated with Hugging Face\'s Transformers. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Article: ', 'raw': 'Article: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/samuellimabraz/peft-methods', 'raw': 'https://huggingface.co/blog/samuellimabraz/peft-methods'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Notebook: ', 'raw': 'Notebook: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1B9RsKLMa8SwTxLsxRT8g9OedK10zfBEP?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1B9RsKLMa8SwTxLsxRT8g9OedK10zfBEP?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection: ', 'raw': 'Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'samuellimabraz/service-summary-6793ccfe774073328ea9f8df'}, 'url': 'https://huggingface.co/collections/samuellimabraz/service-summary-6793ccfe774073328ea9f8df', 'raw': 'https://huggingface.co/collections/samuellimabraz/service-summary-6793ccfe774073328ea9f8df'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Analyzed methods:', 'raw': 'Analyzed methods:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Adapters: Soft Prompts (Prompt Tuning, Prefix Tuning, P-tuning), IA³.', 'raw': '- Adapters: Soft Prompts (Prompt Tuning, Prefix Tuning, P-tuning), IA³.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Reparameterization: LoRA, QLoRA, LoHa, LoKr, X-LoRA, Intrinsic SAID, and variations of initializations (PiSSA, OLoRA, rsLoRA, DoRA).', 'raw': '- Reparameterization: LoRA, QLoRA, LoHa, LoKr, X-LoRA, Intrinsic SAID, and variations of initializations (PiSSA, OLoRA, rsLoRA, DoRA).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Selective Tuning: BitFit, DiffPruning, FAR, FishMask.', 'raw': '- Selective Tuning: BitFit, DiffPruning, FAR, FishMask.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm starting out in generative AI, I have more experience with computer vision and robotics. Just sharing here 🤗 "", 'raw': ""I'm starting out in generative AI, I have more experience with computer vision and robotics. Just sharing here 🤗 ""}]","I wrote an article on Parameter-Efficient Fine-Tuning (PEFT), exploring techniques for efficient fine-tuning in LLMs, their implementations, and variations. 
+
+The study is based on the article ""Scaling Down to Scale Up: A Guide to Parameter-Efficient Fine-Tuning"" and the PEFT library integrated with Hugging Face's Transformers. 
+
+Article: https://huggingface.co/blog/samuellimabraz/peft-methods
+Notebook: https://colab.research.google.com/drive/1B9RsKLMa8SwTxLsxRT8g9OedK10zfBEP?usp=sharing
+Collection: https://huggingface.co/collections/samuellimabraz/service-summary-6793ccfe774073328ea9f8df
+
+Analyzed methods:
+- Adapters: Soft Prompts (Prompt Tuning, Prefix Tuning, P-tuning), IA³.
+- Reparameterization: LoRA, QLoRA, LoHa, LoKr, X-LoRA, Intrinsic SAID, and variations of initializations (PiSSA, OLoRA, rsLoRA, DoRA).
+- Selective Tuning: BitFit, DiffPruning, FAR, FishMask.
+
+I'm starting out in generative AI, I have more experience with computer vision and robotics. 
Just sharing here 🤗 ",[],[],"[{'reaction': '👍', 'users': ['John6666', 'nicholasKluge', 'LucasBer'], 'count': 3}]",2025-01-24 17:30:43,2025-01-24 17:31:34.336,[],/posts/samuellimabraz/453442787220285,477,"{'language': 'en', 'probability': 0.7629308104515076}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,446893906051647,"[{'type': 'text', 'value': 'Today we make the biggest release in smolagents so far: 𝘄𝗲 𝗲𝗻𝗮𝗯𝗹𝗲 𝘃𝗶𝘀𝗶𝗼𝗻 𝗺𝗼𝗱𝗲𝗹𝘀, 𝘄𝗵𝗶𝗰𝗵 𝗮𝗹𝗹𝗼𝘄𝘀 𝘁𝗼 𝗯𝘂𝗶𝗹𝗱 𝗽𝗼𝘄𝗲𝗿𝗳𝘂𝗹 𝘄𝗲𝗯 𝗯𝗿𝗼𝘄𝘀𝗶𝗻𝗴 𝗮𝗴𝗲𝗻𝘁𝘀! 🥳', 'raw': 'Today we make the biggest release in smolagents so far: 𝘄𝗲 𝗲𝗻𝗮𝗯𝗹𝗲 𝘃𝗶𝘀𝗶𝗼𝗻 𝗺𝗼𝗱𝗲𝗹𝘀, 𝘄𝗵𝗶𝗰𝗵 𝗮𝗹𝗹𝗼𝘄𝘀 𝘁𝗼 𝗯𝘂𝗶𝗹𝗱 𝗽𝗼𝘄𝗲𝗿𝗳𝘂𝗹 𝘄𝗲𝗯 𝗯𝗿𝗼𝘄𝘀𝗶𝗻𝗴 𝗮𝗴𝗲𝗻𝘁𝘀! 🥳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our agents can now casually open up a web browser, and navigate on it by scrolling, clicking elements on the webpage, going back, just like a user would.', 'raw': 'Our agents can now casually open up a web browser, and navigate on it by scrolling, clicking elements on the webpage, going back, just like a user would.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The demo below shows Claude-3.5-Sonnet browsing GitHub for task: ""Find how many commits the author of the current top trending repo did over last year.""', 'raw': 'The demo below shows Claude-3.5-Sonnet browsing GitHub for task: ""Find how many commits the author of the current top trending repo did over last year.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hi ', 'raw': 'Hi '}, {'type': 'mention', 'user': 'mlabonne', 'raw': '@mlabonne'}, {'type': 'text', 'value': ' !', 'raw': ' !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Go try it out, it's the most cracked agentic stuff I've seen in a while 🤯 (well, along with OpenAI's Operator who beat us by one day)"", 'raw': ""Go try it out, it's the most cracked agentic stuff I've seen in a while 🤯 (well, along with OpenAI's Operator who beat us by one day)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For more detail, read our announcement blog 👉 ', 'raw': 'For more detail, read our announcement blog 👉 '}, {'type': 'link', 'href': 'https://huggingface.co/blog/smolagents-can-see', 'raw': 'https://huggingface.co/blog/smolagents-can-see'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The code for the web browser example is here 👉 ', 'raw': 'The code for the web browser example is here 👉 '}, {'type': 'link', 'href': 'https://github.com/huggingface/smolagents/blob/main/examples/vlm_web_browser.py', 'raw': 'https://github.com/huggingface/smolagents/blob/main/examples/vlm_web_browser.py'}]","Today we make the biggest release in smolagents so far: 𝘄𝗲 𝗲𝗻𝗮𝗯𝗹𝗲 𝘃𝗶𝘀𝗶𝗼𝗻 𝗺𝗼𝗱𝗲𝗹𝘀, 𝘄𝗵𝗶𝗰𝗵 𝗮𝗹𝗹𝗼𝘄𝘀 𝘁𝗼 𝗯𝘂𝗶𝗹𝗱 𝗽𝗼𝘄𝗲𝗿𝗳𝘂𝗹 𝘄𝗲𝗯 𝗯𝗿𝗼𝘄𝘀𝗶𝗻𝗴 𝗮𝗴𝗲𝗻𝘁𝘀! 🥳 + +Our agents can now casually open up a web browser, and navigate on it by scrolling, clicking elements on the webpage, going back, just like a user would. + +The demo below shows Claude-3.5-Sonnet browsing GitHub for task: ""Find how many commits the author of the current top trending repo did over last year."" +Hi @mlabonne ! 
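+Tying back to the PEFT write-up a little further up, a minimal LoRA sketch with the `peft` library it discusses; the base checkpoint (`gpt2`) and every hyperparameter here are illustrative assumptions, not values from the article:
+
+```python
+# Minimal LoRA sketch with the PEFT library; base model and hyperparameters
+# are illustrative assumptions, not taken from the article above.
+from transformers import AutoModelForCausalLM
+from peft import LoraConfig, get_peft_model
+
+base = AutoModelForCausalLM.from_pretrained("gpt2")  # small stand-in base model
+
+config = LoraConfig(
+    r=8,                        # low-rank update dimension
+    lora_alpha=16,              # scaling factor applied to the update
+    target_modules=["c_attn"],  # GPT-2's fused attention projection
+    lora_dropout=0.05,
+    task_type="CAUSAL_LM",
+)
+model = get_peft_model(base, config)
+model.print_trainable_parameters()  # only adapter weights remain trainable
+```
+
+Variants mentioned in the post, such as rsLoRA and DoRA, are exposed as flags on the same config object in recent `peft` releases.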
+ +Go try it out, it's the most cracked agentic stuff I've seen in a while 🤯 (well, along with OpenAI's Operator who beat us by one day) + +For more detail, read our announcement blog 👉 https://huggingface.co/blog/smolagents-can-see +The code for the web browser example is here 👉 https://github.com/huggingface/smolagents/blob/main/examples/vlm_web_browser.py","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/Y0YNacI6e6o7YSjkqsJfm.mp4'}]","[{'_id': '61b8e2ba285851687028d395', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/Rq3xWG7mJ3aCRoBsq340h.jpeg', 'fullname': 'Maxime Labonne', 'name': 'mlabonne', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5406}]","[{'reaction': '🔥', 'users': ['Rizki-firman', 'merve', 'mlabonne', 'Nymbo', 'clem', 'kgourgou', 'John6666', 's0me-0ne', 'sayedM', 'jeffwadsworth', 'DamarJati', 'super-cinnamon'], 'count': 12}, {'reaction': '❤️', 'users': ['merve', 'mlabonne', 'clem'], 'count': 3}, {'reaction': '👀', 'users': ['merve', 'mlabonne'], 'count': 2}, {'reaction': '👍', 'users': ['ravi4198', 'lab212'], 'count': 2}, {'reaction': '🚀', 'users': ['Dref360'], 'count': 1}]",2025-01-24 16:44:18,2025-01-25 19:49:47.690,"[{'_id': '61b8e2ba285851687028d395', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/Rq3xWG7mJ3aCRoBsq340h.jpeg', 'fullname': 'Maxime Labonne', 'name': 'mlabonne', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5406, 'isFollowing': False}, {'_id': '643ead1f4fa8bccfd721ea2b', 'avatarUrl': '/avatars/b34c1d0bdd87b3a091b730b7e9a4f628.svg', 'fullname': 'RAHUL YASHWANTKUMAR GUPTA', 'name': 'ryg81', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '67922f9fb7c3dc07f41f9cf8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67922f9fb7c3dc07f41f9cf8/kPwKwnoLmtXBh97WgGYR_.jpeg', 'fullname': 'Eng', 'name': 'VikEngine', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/m-ric/446893906051647,3455,"{'language': 'en', 'probability': 0.8471513986587524}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,984192466066287,"[{'type': 'text', 'value': 'Baichuan is making big moves today 🔥', 'raw': 'Baichuan is making big moves today 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Launched All-Scenario Reasoning Model (language, visual, and search reasoning capabilities) , with medical expertise as one of its key highlights.', 'raw': '✨ Launched All-Scenario Reasoning Model (language, visual, and search reasoning capabilities) , with medical expertise as one of its key highlights.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://ying.baichuan-ai.com/chat', 'raw': 'https://ying.baichuan-ai.com/chat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Released Baichuan-M1-14B Medical LLM on the hub', 'raw': '✨ Released Baichuan-M1-14B Medical LLM on the hub'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Available in both Base and Instruct versions, support English & Chinese.', 'raw': ' Available in both Base and 
Instruct versions, support English & Chinese.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'baichuan-inc/Baichuan-M1-14B-Base'}, 'url': 'https://huggingface.co/baichuan-inc/Baichuan-M1-14B-Base', 'raw': 'https://huggingface.co/baichuan-inc/Baichuan-M1-14B-Base'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'baichuan-inc/Baichuan-M1-14B-Instruct'}, 'url': 'https://huggingface.co/baichuan-inc/Baichuan-M1-14B-Instruct', 'raw': 'https://huggingface.co/baichuan-inc/Baichuan-M1-14B-Instruct'}, {'type': 'new_line', 'raw': '\n'}]","Baichuan is making big moves today 🔥 + +✨ Launched All-Scenario Reasoning Model (language, visual, and search reasoning capabilities) , with medical expertise as one of its key highlights. +https://ying.baichuan-ai.com/chat + +✨ Released Baichuan-M1-14B Medical LLM on the hub + Available in both Base and Instruct versions, support English & Chinese. + +Model: +https://huggingface.co/baichuan-inc/Baichuan-M1-14B-Base +https://huggingface.co/baichuan-inc/Baichuan-M1-14B-Instruct +",[],[],"[{'reaction': '🔥', 'users': ['shanchen', 'prithivMLmods', 'John6666'], 'count': 3}]",2025-01-24 14:08:04,2025-01-24 14:08:04.000,[],/posts/AdinaY/984192466066287,1396,"{'language': 'en', 'probability': 0.8276522159576416}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,808939774016887,"[{'type': 'text', 'value': 'A general FYI that Valiant Labs no longer has an X account. This is a business decision. Many other businesses seem to be making the same decision right now.', 'raw': 'A general FYI that Valiant Labs no longer has an X account. This is a business decision. Many other businesses seem to be making the same decision right now.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can follow my account on Bluesky for updates on Shining Valiant 3, other Valiant Labs models, my open-source datasets, etc: ', 'raw': 'You can follow my account on Bluesky for updates on Shining Valiant 3, other Valiant Labs models, my open-source datasets, etc: '}, {'type': 'link', 'href': 'https://bsky.app/profile/sequelbox.bsky.social', 'raw': 'https://bsky.app/profile/sequelbox.bsky.social'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'back to building :)', 'raw': 'back to building :)'}]","A general FYI that Valiant Labs no longer has an X account. This is a business decision. Many other businesses seem to be making the same decision right now. 
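+Returning to the Baichuan-M1 release above, a hedged sketch of loading the Instruct checkpoint with `transformers`; `trust_remote_code=True` is an assumption based on how Baichuan models are usually shipped, so check the model card before running:
+
+```python
+# Hedged sketch for baichuan-inc/Baichuan-M1-14B-Instruct (mentioned above).
+# trust_remote_code=True is an assumption (Baichuan releases typically ship
+# custom modeling code); the prompt is an arbitrary example.
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_id = "baichuan-inc/Baichuan-M1-14B-Instruct"
+tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(
+    model_id, trust_remote_code=True, torch_dtype="auto", device_map="auto"
+)
+
+messages = [{"role": "user", "content": "List common symptoms of anemia."}]
+inputs = tokenizer.apply_chat_template(
+    messages, add_generation_prompt=True, return_tensors="pt"
+).to(model.device)
+print(tokenizer.decode(model.generate(inputs, max_new_tokens=128)[0]))
+```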
+ +You can follow my account on Bluesky for updates on Shining Valiant 3, other Valiant Labs models, my open-source datasets, etc: https://bsky.app/profile/sequelbox.bsky.social + +back to building :)",[],[],"[{'reaction': '👍', 'users': ['dzydev', 'John6666', 'kylewascher', 'benjamin-paine', 'Ba2han', 'agentlans', 'jomusic'], 'count': 7}, {'reaction': '❤️', 'users': ['zoeywin', 'antonvinny', 'jomusic'], 'count': 3}, {'reaction': '🚀', 'users': ['throwaway932'], 'count': 1}]",2025-01-21 14:54:57,2025-01-21 14:54:57.483,[],/posts/sequelbox/808939774016887,2363,"{'language': 'en', 'probability': 0.9040932059288025}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,475506943926907,"[{'type': 'text', 'value': ""The RAG's in the bag! "", 'raw': ""The RAG's in the bag! ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can now use the Synthetic Data Generator with your own domain-specific seed data to generate a dataset for fine-tuning retrieval or reranking models.', 'raw': 'You can now use the Synthetic Data Generator with your own domain-specific seed data to generate a dataset for fine-tuning retrieval or reranking models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub: ', 'raw': 'GitHub: '}, {'type': 'link', 'href': 'https://buff.ly/49IDSmd', 'raw': 'https://buff.ly/49IDSmd'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Spaces: ', 'raw': 'Spaces: '}, {'type': 'link', 'href': 'https://buff.ly/3Y1S99z', 'raw': 'https://buff.ly/3Y1S99z'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog: ', 'raw': 'Blog: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/sdiazlor/fine-tune-modernbert-for-rag-with-synthetic-data', 'raw': 'https://huggingface.co/blog/sdiazlor/fine-tune-modernbert-for-rag-with-synthetic-data'}]","The RAG's in the bag! + +You can now use the Synthetic Data Generator with your own domain-specific seed data to generate a dataset for fine-tuning retrieval or reranking models. + +GitHub: https://buff.ly/49IDSmd +Spaces: https://buff.ly/3Y1S99z +Blog: https://huggingface.co/blog/sdiazlor/fine-tune-modernbert-for-rag-with-synthetic-data",[],[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'John6666', 'SPICHKIN'], 'count': 3}]",2025-01-21 13:14:02,2025-01-21 19:49:42.108,"[{'_id': '678f31bb84f294b2ba1a092e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/sgXNX3WLscOvgU_Q2XGPq.jpeg', 'fullname': 'Santino Buttazzoni', 'name': 'santibuttazzoni', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/davidberenstein1957/475506943926907,1899,"{'language': 'en', 'probability': 0.6279020309448242}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,303624388330037,"[{'type': 'text', 'value': 'What happened yesterday in the Chinese AI community? 🚀', 'raw': 'What happened yesterday in the Chinese AI community? 
🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'T2A-01-HD 👉 ', 'raw': 'T2A-01-HD 👉 '}, {'type': 'link', 'href': 'https://hailuo.ai/audio', 'raw': 'https://hailuo.ai/audio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""MiniMax's Text-to-Audio model, now in Hailuo AI, offers 300+ voices in 17+ languages and instant emotional voice cloning."", 'raw': ""MiniMax's Text-to-Audio model, now in Hailuo AI, offers 300+ voices in 17+ languages and instant emotional voice cloning.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tare 👉 ', 'raw': 'Tare 👉 '}, {'type': 'link', 'href': 'https://www.trae.ai/', 'raw': 'https://www.trae.ai/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A new coding tool by Bytedance for professional developers, supporting English & Chinese with free access to Claude 3.5 and GPT-4 for a limited time. ', 'raw': 'A new coding tool by Bytedance for professional developers, supporting English & Chinese with free access to Claude 3.5 and GPT-4 for a limited time. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'DeepSeek-R1 Series 👉 ', 'raw': 'DeepSeek-R1 Series 👉 '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'deepseek-ai/deepseek-r1-678e1e131c0169c0bc89728d'}, 'url': 'https://huggingface.co/collections/deepseek-ai/deepseek-r1-678e1e131c0169c0bc89728d', 'raw': 'https://huggingface.co/collections/deepseek-ai/deepseek-r1-678e1e131c0169c0bc89728d'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Open-source reasoning models with MIT license by DeepSeek.', 'raw': 'Open-source reasoning models with MIT license by DeepSeek.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kimi K 1.5 👉 ', 'raw': 'Kimi K 1.5 👉 '}, {'type': 'link', 'href': 'https://github.com/MoonshotAI/Kimi-k1.5', 'raw': 'https://github.com/MoonshotAI/Kimi-k1.5'}, {'type': 'text', 'value': ' | ', 'raw': ' | '}, {'type': 'link', 'href': 'https://kimi.ai/', 'raw': 'https://kimi.ai/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'An O1-level multi-modal model by MoonShot AI, utilizing reinforcement learning with long and short-chain-of-thought and supporting up to 128k tokens.', 'raw': 'An O1-level multi-modal model by MoonShot AI, utilizing reinforcement learning with long and short-chain-of-thought and supporting up to 128k tokens.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And today…', 'raw': 'And today…'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hunyuan 3D-2.0 👉 ', 'raw': 'Hunyuan 3D-2.0 👉 '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'tencent/Hunyuan3D-2'}, 'url': 'https://huggingface.co/tencent/Hunyuan3D-2', 'raw': 'https://huggingface.co/tencent/Hunyuan3D-2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A SoTA 3D synthesis system for high-res textured assets by Tencent Hunyuan , with open weights and code! ', 'raw': 'A SoTA 3D synthesis system for high-res textured assets by Tencent Hunyuan , with open weights and code! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Stay tuned for more updates 👉 ', 'raw': 'Stay tuned for more updates 👉 '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'zh-ai-community'}, 'url': 'https://huggingface.co/zh-ai-community', 'raw': 'https://huggingface.co/zh-ai-community', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/Prk2cN2W9azl2SHoxMYVg.png'}]","What happened yesterday in the Chinese AI community? 🚀 + +T2A-01-HD 👉 https://hailuo.ai/audio +MiniMax's Text-to-Audio model, now in Hailuo AI, offers 300+ voices in 17+ languages and instant emotional voice cloning. + +Tare 👉 https://www.trae.ai/ +A new coding tool by Bytedance for professional developers, supporting English & Chinese with free access to Claude 3.5 and GPT-4 for a limited time. + +DeepSeek-R1 Series 👉 https://huggingface.co/collections/deepseek-ai/deepseek-r1-678e1e131c0169c0bc89728d +Open-source reasoning models with MIT license by DeepSeek. + +Kimi K 1.5 👉 https://github.com/MoonshotAI/Kimi-k1.5 | https://kimi.ai/ +An O1-level multi-modal model by MoonShot AI, utilizing reinforcement learning with long and short-chain-of-thought and supporting up to 128k tokens. + +And today… + +Hunyuan 3D-2.0 👉 https://huggingface.co/tencent/Hunyuan3D-2 +A SoTA 3D synthesis system for high-res textured assets by Tencent Hunyuan , with open weights and code! + +Stay tuned for more updates 👉 https://huggingface.co/zh-ai-community",[],[],"[{'reaction': '🚀', 'users': ['davidberenstein1957', 'burtenshaw', 'fifteen42', 'andito', 'roland0822', 'John6666', 'Manteez', 'afkpk'], 'count': 8}, {'reaction': '❤️', 'users': ['burtenshaw', 'kevinktg', 'nguyenmanhd93', 'ryg81', 'charleno'], 'count': 5}, {'reaction': '👍', 'users': ['ZiggyS'], 'count': 1}]",2025-01-21 11:40:49,2025-01-21 11:49:48.138,[],/posts/AdinaY/303624388330037,3325,"{'language': 'en', 'probability': 0.7975327968597412}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,505356500463280,"[{'type': 'text', 'value': '🎨 QR CANVAS: Create Beautiful Custom QR Codes!', 'raw': '🎨 QR CANVAS: Create Beautiful Custom QR Codes!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginipick/QR-Canvas'}, 'url': 'https://huggingface.co/spaces/ginipick/QR-Canvas', 'raw': 'https://huggingface.co/spaces/ginipick/QR-Canvas'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Create your unique QR codes with QR CANVAS - where functionality meets aesthetics. Try it now: QR CANVAS', 'raw': 'Create your unique QR codes with QR CANVAS - where functionality meets aesthetics. 
Try it now: QR CANVAS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Supported Options', 'raw': '🎯 Supported Options'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📱 QR Code Types', 'raw': '📱 QR Code Types'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'URL: Website links (auto https:// prefix)', 'raw': 'URL: Website links (auto https:// prefix)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Email: Email address with subject', 'raw': 'Email: Email address with subject'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Phone: International/local numbers', 'raw': 'Phone: International/local numbers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SMS: Text message + phone number', 'raw': 'SMS: Text message + phone number'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'WhatsApp: Messages and numbers', 'raw': 'WhatsApp: Messages and numbers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Location: Latitude/longitude coordinates', 'raw': 'Location: Latitude/longitude coordinates'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Wi-Fi: Network credentials', 'raw': 'Wi-Fi: Network credentials'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Text: Plain text content', 'raw': 'Text: Plain text content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'vCard: Digital business cards', 'raw': 'vCard: Digital business cards'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Design Elements', 'raw': '🎨 Design Elements'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Border Decorations', 'raw': 'Border Decorations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Flowers: Pink petals with yellow centers', 'raw': 'Flowers: Pink petals with yellow centers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hearts: Continuous red hearts', 'raw': 'Hearts: Continuous red hearts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Waves: Dual-tone wave patterns', 'raw': 'Waves: Dual-tone wave patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Leaves: Green leaf patterns', 'raw': 'Leaves: Green leaf patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Stars: Golden star decorations', 'raw': 'Stars: Golden star decorations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Chains: Gray chain links', 'raw': 'Chains: Gray chain links'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Bubbles: Sky blue bubble effects', 'raw': 'Bubbles: Sky blue bubble effects'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Vines: Green vine patterns', 'raw': 'Vines: Green vine patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Diamonds: Purple diamond shapes', 'raw': 'Diamonds: Purple diamond shapes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lace: Gray lace patterns', 'raw': 'Lace: Gray lace patterns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙️ Technical Settings', 'raw': '⚙️ Technical Settings'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Error Correction', 'raw': 'Error Correction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 
'value': 'Low (7%) to High (30%)', 'raw': 'Low (7%) to High (30%)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Size adjustments (1-30)', 'raw': 'Size adjustments (1-30)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Border width control (0-5)', 'raw': 'Border width control (0-5)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Customization', 'raw': '🎯 Customization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full RGB color picker', 'raw': 'Full RGB color picker'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'QR code and background colors', 'raw': 'QR code and background colors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Real-time preview', 'raw': 'Real-time preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'High-resolution PNG output', 'raw': 'High-resolution PNG output'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💫 Why Choose QR CANVAS?', 'raw': '💫 Why Choose QR CANVAS?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Intuitive interface', 'raw': 'Intuitive interface'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Seamless border integration', 'raw': 'Seamless border integration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Professional output quality', 'raw': 'Professional output quality'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instant scanning compatibility', 'raw': 'Instant scanning compatibility'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Start creating your stylish QR codes now! Visit our Hugging Face Space 🚀', 'raw': 'Start creating your stylish QR codes now! Visit our Hugging Face Space 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#qrcode #design #customization #webdev #gradio', 'raw': '#qrcode #design #customization #webdev #gradio'}]","🎨 QR CANVAS: Create Beautiful Custom QR Codes! + +https://huggingface.co/spaces/ginipick/QR-Canvas + +Create your unique QR codes with QR CANVAS - where functionality meets aesthetics. Try it now: QR CANVAS + +🎯 Supported Options +📱 QR Code Types +URL: Website links (auto https:// prefix) +Email: Email address with subject +Phone: International/local numbers +SMS: Text message + phone number +WhatsApp: Messages and numbers +Location: Latitude/longitude coordinates +Wi-Fi: Network credentials +Text: Plain text content +vCard: Digital business cards + +🎨 Design Elements +Border Decorations + +Flowers: Pink petals with yellow centers +Hearts: Continuous red hearts +Waves: Dual-tone wave patterns +Leaves: Green leaf patterns +Stars: Golden star decorations +Chains: Gray chain links +Bubbles: Sky blue bubble effects +Vines: Green vine patterns +Diamonds: Purple diamond shapes +Lace: Gray lace patterns + +⚙️ Technical Settings +Error Correction + +Low (7%) to High (30%) +Size adjustments (1-30) +Border width control (0-5) + +🎯 Customization + +Full RGB color picker +QR code and background colors +Real-time preview +High-resolution PNG output + +💫 Why Choose QR CANVAS? + +Intuitive interface +Seamless border integration +Professional output quality +Instant scanning compatibility + +Start creating your stylish QR codes now! 
Visit our Hugging Face Space 🚀 + +#qrcode #design #customization #webdev #gradio","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/lvoiDIbvf6zjTn7V0_QHg.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/UZ9afLjeT1c49ajkV1PTK.png'}]",[],"[{'reaction': '🔥', 'users': ['openfree', 'seawolf2357', 'adminnews', 'mple33', 'truetekken', 'newyorkcheese', 'aiqcamp', 'fantos', 'fantaxy', 'gunship999', 'immunobiotech', 'aiqtech', 'kolaslab', 'Marco193', 'iky1e', 'akash32bit'], 'count': 16}, {'reaction': '🚀', 'users': ['openfree', 'seawolf2357', 'adminnews', 'mple33', 'truetekken', 'newyorkcheese', 'aiqcamp', 'fantos', 'fantaxy', 'gunship999', 'immunobiotech', 'aiqtech', 'kolaslab', 'John6666'], 'count': 14}, {'reaction': '👀', 'users': ['openfree', 'seawolf2357', 'adminnews', 'mple33', 'newyorkcheese', 'fantos', 'fantaxy', 'gunship999', 'immunobiotech', 'aiqtech', 'kolaslab', 'ginipick', 'aiqcamp'], 'count': 13}, {'reaction': '❤️', 'users': ['openfree', 'seawolf2357', 'adminnews', 'mple33', 'truetekken', 'fantaxy', 'gunship999', 'ginipick', 'aiqcamp', 'Marco193'], 'count': 10}, {'reaction': '😎', 'users': ['openfree', 'seawolf2357', 'ginipick', 'aiqcamp'], 'count': 4}, {'reaction': '🤗', 'users': ['openfree', 'adminnews', 'ginipick', 'aiqcamp'], 'count': 4}, {'reaction': '🧠', 'users': ['openfree', 'ginipick', 'aiqcamp'], 'count': 3}, {'reaction': '➕', 'users': ['openfree', 'ginipick', 'aiqcamp'], 'count': 3}, {'reaction': '🤯', 'users': ['openfree', 'aiqcamp'], 'count': 2}, {'reaction': '😔', 'users': ['openfree', 'aiqcamp'], 'count': 2}, {'reaction': '🤝', 'users': ['openfree', 'aiqcamp'], 'count': 2}, {'reaction': '👍', 'users': ['openfree', 'aiqcamp'], 'count': 2}]",2025-01-21 10:15:09,2025-01-21 10:15:09.472,[],/posts/ginipick/505356500463280,4723,"{'language': 'en', 'probability': 0.662585437297821}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,196178930489825,"[{'type': 'text', 'value': 'Hunyuan 3D 2.0🔥 a synthesis system for high-res textured 3D assets released by Tencent Hunyuan ', 'raw': 'Hunyuan 3D 2.0🔥 a synthesis system for high-res textured 3D assets released by Tencent Hunyuan '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2 key components: Hunyuan3D-DiT (geometry) and Hunyuan3D-Paint (textures) work together, achieving highly realistic 3D results.', 'raw': '2 key components: Hunyuan3D-DiT (geometry) and Hunyuan3D-Paint (textures) work together, achieving highly realistic 3D results.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'tencent/Hunyuan3D-2'}, 'url': 'https://huggingface.co/tencent/Hunyuan3D-2', 'raw': 'https://huggingface.co/tencent/Hunyuan3D-2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo coming soon!', 'raw': 'Demo coming soon!'}, {'type': 'new_line', 'raw': '\n'}]","Hunyuan 3D 2.0🔥 a synthesis system for high-res textured 3D assets released by Tencent Hunyuan + +2 key components: Hunyuan3D-DiT (geometry) and Hunyuan3D-Paint (textures) work together, achieving highly realistic 3D results. + +Model: https://huggingface.co/tencent/Hunyuan3D-2 +Demo coming soon! 
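+The error-correction, size, border, and color settings enumerated in the QR CANVAS post above map directly onto the widely used `qrcode` package, so a minimal sketch of the same knobs could look like this (an illustration of the underlying library, not the Space's actual source):
+
+```python
+# Illustrative sketch of the QR settings described in the QR CANVAS post;
+# install with `pip install "qrcode[pil]"`. Not the Space's actual source.
+import qrcode
+from qrcode.constants import ERROR_CORRECT_H  # "High": ~30% recovery
+
+qr = qrcode.QRCode(
+    version=5,                         # symbol size (library: 1-40; post exposes 1-30)
+    error_correction=ERROR_CORRECT_H,  # L=7%, M=15%, Q=25%, H=30%
+    box_size=10,                       # pixels per module
+    border=4,                          # quiet zone in modules (post: 0-5)
+)
+qr.add_data("https://huggingface.co/spaces/ginipick/QR-Canvas")
+qr.make(fit=True)
+img = qr.make_image(fill_color="purple", back_color="white")  # custom colors
+img.save("qr_canvas_demo.png")                                # high-res PNG output
+```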
+
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/yv7wrV6-xRPOYgARQVzr2.gif'}]",[],"[{'reaction': '🔥', 'users': ['John6666'], 'count': 1}]",2025-01-21 10:04:22,2025-01-21 10:04:22.544,[],/posts/AdinaY/196178930489825,1324,"{'language': 'en', 'probability': 0.7949992418289185}",0
+https://cdn-avatars.huggingface.co/v1/production/uploads/1674191139776-noauth.png,257.0,Xuan-Son Nguyen,ngxson,282611672514036,"[{'type': 'text', 'value': 'Fun fact: you can get any DeepSeek-R1-Qwen **abliterated** by using one of these LoRA adapters (GGUF available!)', 'raw': 'Fun fact: you can get any DeepSeek-R1-Qwen **abliterated** by using one of these LoRA adapters (GGUF available!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'ngxson/extracted-lora-mergekit-677d5c3eea0b6a7661201846'}, 'url': 'https://huggingface.co/collections/ngxson/extracted-lora-mergekit-677d5c3eea0b6a7661201846', 'raw': 'https://huggingface.co/collections/ngxson/extracted-lora-mergekit-677d5c3eea0b6a7661201846'}]","Fun fact: you can get any DeepSeek-R1-Qwen **abliterated** by using one of these LoRA adapters (GGUF available!)
+
+https://huggingface.co/collections/ngxson/extracted-lora-mergekit-677d5c3eea0b6a7661201846",[],[],"[{'reaction': '🚀', 'users': ['John6666', 'hulkday'], 'count': 2}, {'reaction': '🔥', 'users': ['John6666', 'hulkday'], 'count': 2}]",2025-01-21 10:02:11,2025-01-21 10:02:11.107,[],/posts/ngxson/282611672514036,1080,"{'language': 'en', 'probability': 0.7325558066368103}",0
+https://cdn-avatars.huggingface.co/v1/production/uploads/673ab3647afcea17eb4378fd/YQB6zSH1LPxBMUYayIURi.png,35.0,Loser Cheems,JingzeShi,680838846472678,"[{'type': 'text', 'value': '🤩warmup -> stable -> decay learning rate scheduler: ', 'raw': '🤩warmup -> stable -> decay learning rate scheduler: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '😎use the Stable Phase CheckPoints to Continue Training the model on Any New Dataset without spikes of the training!!!', 'raw': '😎use the Stable Phase CheckPoints to Continue Training the model on Any New Dataset without spikes of the training!!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'SmallDoge/Doge-20M-checkpoint'}, 'url': 'https://huggingface.co/SmallDoge/Doge-20M-checkpoint', 'raw': 'https://huggingface.co/SmallDoge/Doge-20M-checkpoint'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'SmallDoge/Doge-60M-checkpoint'}, 'url': 'https://huggingface.co/SmallDoge/Doge-60M-checkpoint', 'raw': 'https://huggingface.co/SmallDoge/Doge-60M-checkpoint'}, {'type': 'new_line', 'raw': '\n'}]","🤩warmup -> stable -> decay learning rate scheduler: 
+😎use the Stable Phase CheckPoints to Continue Training the model on Any New Dataset without spikes of the training!!!
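+To make the warmup -> stable -> decay schedule described just above concrete, a tiny self-contained sketch (the phase lengths and rates are made-up illustration values, not the SmallDoge training configuration):
+
+```python
+# Toy warmup -> stable -> decay learning-rate schedule; all numbers are
+# illustrative, not the SmallDoge configuration.
+def wsd_lr(step, warmup=100, stable=800, decay=100, peak=3e-4, floor=3e-5):
+    if step < warmup:                  # linear warmup to the peak rate
+        return peak * step / warmup
+    if step < warmup + stable:         # flat "stable" phase; checkpoints saved
+        return peak                    # here can seed training on a new dataset
+    t = min((step - warmup - stable) / decay, 1.0)
+    return peak + (floor - peak) * t   # linear decay down to the floor
+
+for s in (0, 50, 500, 950, 1000):
+    print(s, f"{wsd_lr(s):.2e}")
+```
+
+Because the learning rate is constant through the stable phase, resuming from one of its checkpoints avoids the loss spikes that restarting from a decayed rate would cause, which is the point the post is making.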
+https://huggingface.co/SmallDoge/Doge-20M-checkpoint +https://huggingface.co/SmallDoge/Doge-60M-checkpoint +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/673ab3647afcea17eb4378fd/xwfQERdzm6Vip-OtBDQop.png'}]",[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'John6666', 'Yukkkop', 'AtAndDev', 'LoserCheems', 'AlbertShi', 'JingzeShi'], 'count': 7}, {'reaction': '😎', 'users': ['JingzeShi'], 'count': 1}, {'reaction': '👀', 'users': ['JingzeShi'], 'count': 1}, {'reaction': '🤗', 'users': ['JingzeShi'], 'count': 1}]",2025-01-21 08:50:25,2025-01-31 13:44:43.119,"[{'_id': '66a0a9e7e81290f90f93b0a6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66a0a9e7e81290f90f93b0a6/_HAsUv1eh0Im-aYZaa1C0.png', 'fullname': 'BinghengWu', 'name': 'wubingheng', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '673ab3647afcea17eb4378fd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/673ab3647afcea17eb4378fd/YQB6zSH1LPxBMUYayIURi.png', 'fullname': 'Loser Cheems', 'name': 'JingzeShi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 35, 'isFollowing': False}]",/posts/JingzeShi/680838846472678,1728,"{'language': 'en', 'probability': 0.7279482483863831}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1676563169736-noauth.jpeg,3.0,de Rodrigo,de-Rodrigo,547004650779023,"[{'type': 'text', 'value': 'MERIT Dataset 🎒📃🏆 Updates: The Token Classification Version is Now Live on the Hub! ', 'raw': 'MERIT Dataset 🎒📃🏆 Updates: The Token Classification Version is Now Live on the Hub! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This new version extends the previous dataset by providing richer labels that include word bounding boxes alongside the already available images. 🚀', 'raw': 'This new version extends the previous dataset by providing richer labels that include word bounding boxes alongside the already available images. 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We can't wait to see how you use this update! Give it a try, and let us know your thoughts, questions, or any cool projects you build with it. 💡"", 'raw': ""We can't wait to see how you use this update! Give it a try, and let us know your thoughts, questions, or any cool projects you build with it. 
💡""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Resources:', 'raw': 'Resources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dataset: ', 'raw': '- Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'de-Rodrigo/merit'}, 'url': 'https://huggingface.co/datasets/de-Rodrigo/merit', 'raw': 'https://huggingface.co/datasets/de-Rodrigo/merit'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Code and generation pipeline: ', 'raw': '- Code and generation pipeline: '}, {'type': 'link', 'href': 'https://github.com/nachoDRT/MERIT-Dataset', 'raw': 'https://github.com/nachoDRT/MERIT-Dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Paper: ', 'raw': '- Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2409.00447'}, 'url': 'https://huggingface.co/papers/2409.00447', 'raw': 'https://huggingface.co/papers/2409.00447', 'label': 'The MERIT Dataset: Modelling and Efficiently Rendering Interpretable\n Transcripts (2409.00447)'}]","MERIT Dataset 🎒📃🏆 Updates: The Token Classification Version is Now Live on the Hub! + +This new version extends the previous dataset by providing richer labels that include word bounding boxes alongside the already available images. 🚀 + +We can't wait to see how you use this update! Give it a try, and let us know your thoughts, questions, or any cool projects you build with it. 💡 + +Resources: + +- Dataset: https://huggingface.co/datasets/de-Rodrigo/merit +- Code and generation pipeline: https://github.com/nachoDRT/MERIT-Dataset +- Paper: https://huggingface.co/papers/2409.00447","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63ee535a190ddd6214f30dc2/lyCnAPhi8BN3QxglLq3Nu.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-01-21 08:22:22,2025-01-21 08:22:22.852,[],/posts/de-Rodrigo/547004650779023,565,"{'language': 'en', 'probability': 0.8152303099632263}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,352175956353262,"[{'type': 'text', 'value': 'Exciting Research Alert: Multimodal Semantic Retrieval Revolutionizing E-commerce Product Search!', 'raw': 'Exciting Research Alert: Multimodal Semantic Retrieval Revolutionizing E-commerce Product Search!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just came across a fascinating paper from ', 'raw': 'Just came across a fascinating paper from '}, {'type': 'mention', 'user': 'amazon', 'raw': '@amazon'}, {'type': 'text', 'value': ' researchers that tackles a crucial challenge in e-commerce search - integrating both text and image data for better product discovery.', 'raw': ' researchers that tackles a crucial challenge in e-commerce search - integrating both text and image data for better product discovery.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Key Innovations', 'raw': '>> Key Innovations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The researchers developed two groundbreaking architectures:', 'raw': 'The researchers developed two groundbreaking architectures:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- A 4-tower multimodal model 
combining BERT and CLIP for processing both text and images', 'raw': '- A 4-tower multimodal model combining BERT and CLIP for processing both text and images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- A streamlined 3-tower model that achieves comparable performance with reduced complexity', 'raw': '- A streamlined 3-tower model that achieves comparable performance with reduced complexity'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Technical Deep Dive', 'raw': '>> Technical Deep Dive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The system leverages dual-encoder architecture with some impressive components:', 'raw': 'The system leverages dual-encoder architecture with some impressive components:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Bi-encoder BERT model for processing text queries and product descriptions', 'raw': '- Bi-encoder BERT model for processing text queries and product descriptions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Visual transformers from CLIP for image processing', 'raw': '- Visual transformers from CLIP for image processing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Advanced fusion techniques including concatenation and MLP-based approaches', 'raw': '- Advanced fusion techniques including concatenation and MLP-based approaches'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cosine similarity scoring for efficient large-scale retrieval', 'raw': '- Cosine similarity scoring for efficient large-scale retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Real-world Impact', 'raw': '>> Real-world Impact'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The results are remarkable:', 'raw': 'The results are remarkable:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Up to 78.6% recall@100 for product retrieval', 'raw': '- Up to 78.6% recall@100 for product retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Over 50% exact match precision', 'raw': '- Over 50% exact match precision'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Significant reduction in irrelevant results to just 11.9%', 'raw': '- Significant reduction in irrelevant results to just 11.9%'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Industry Applications', 'raw': '>> Industry Applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This research has major implications for:', 'raw': 'This research has major implications for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- E-commerce search optimization', 'raw': '- E-commerce search optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Visual product discovery', 'raw': '- Visual product discovery'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Large-scale retrieval systems', 'raw': '- Large-scale retrieval systems'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cross-modal product recommendations', 'raw': '- Cross-modal product recommendations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""What's particularly impressive is how the system handles millions of products while maintaining computational efficiency through smart 
architectural choices."", 'raw': ""What's particularly impressive is how the system handles millions of products while maintaining computational efficiency through smart architectural choices.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This work represents a significant step forward in making online shopping more intuitive and accurate. The researchers from Amazon have demonstrated that combining visual and textual information can dramatically improve search relevance while maintaining scalability.', 'raw': 'This work represents a significant step forward in making online shopping more intuitive and accurate. The researchers from Amazon have demonstrated that combining visual and textual information can dramatically improve search relevance while maintaining scalability.'}]","Exciting Research Alert: Multimodal Semantic Retrieval Revolutionizing E-commerce Product Search! + +Just came across a fascinating paper from @amazon researchers that tackles a crucial challenge in e-commerce search - integrating both text and image data for better product discovery. + +>> Key Innovations +The researchers developed two groundbreaking architectures: +- A 4-tower multimodal model combining BERT and CLIP for processing both text and images +- A streamlined 3-tower model that achieves comparable performance with reduced complexity + +>> Technical Deep Dive +The system leverages dual-encoder architecture with some impressive components: +- Bi-encoder BERT model for processing text queries and product descriptions +- Visual transformers from CLIP for image processing +- Advanced fusion techniques including concatenation and MLP-based approaches +- Cosine similarity scoring for efficient large-scale retrieval + +>> Real-world Impact +The results are remarkable: +- Up to 78.6% recall@100 for product retrieval +- Over 50% exact match precision +- Significant reduction in irrelevant results to just 11.9% + +>> Industry Applications +This research has major implications for: +- E-commerce search optimization +- Visual product discovery +- Large-scale retrieval systems +- Cross-modal product recommendations + +What's particularly impressive is how the system handles millions of products while maintaining computational efficiency through smart architectural choices. + +This work represents a significant step forward in making online shopping more intuitive and accurate. The researchers from Amazon have demonstrated that combining visual and textual information can dramatically improve search relevance while maintaining scalability.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/d-r1Gji9VHubxnYl-1kHh.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}]",2025-01-21 03:05:30,2025-01-21 03:05:30.611,[],/posts/singhsidhukuldeep/352175956353262,572,"{'language': 'en', 'probability': 0.848569393157959}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg,3938.0,chansung park,chansung,165424668459034,"[{'type': 'text', 'value': 'Simple Summarization on DeepSeek-R1 from DeepSeek AI', 'raw': 'Simple Summarization on DeepSeek-R1 from DeepSeek AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The RL stage is very important. ', 'raw': 'The RL stage is very important. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '↳ However, it is difficult to create a truly helpful AI for people solely through RL. ', 'raw': '↳ However, it is difficult to create a truly helpful AI for people solely through RL. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '↳ So, we applied a learning pipeline consisting of four stages: providing a good starting point, reasoning RL, SFT, and safety RL, and achieved performance comparable to o1. ', 'raw': '↳ So, we applied a learning pipeline consisting of four stages: providing a good starting point, reasoning RL, SFT, and safety RL, and achieved performance comparable to o1. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '↳ Simply fine-tuning other open models with the data generated by R1-Zero (distillation) resulted in performance comparable to o1-mini.', 'raw': '↳ Simply fine-tuning other open models with the data generated by R1-Zero (distillation) resulted in performance comparable to o1-mini.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Of course, this is just a brief overview and may not be of much help. All models are accessible on Hugging Face, and the paper can be read through the GitHub repository.', 'raw': 'Of course, this is just a brief overview and may not be of much help. All models are accessible on Hugging Face, and the paper can be read through the GitHub repository.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'deepseek-ai'}, 'url': 'https://huggingface.co/deepseek-ai', 'raw': 'https://huggingface.co/deepseek-ai', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538815d1bdb3c40db94fbfa/xMBly9PUMphrFVMxLX4kq.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'link', 'href': 'https://github.com/deepseek-ai/DeepSeek-R1', 'raw': 'https://github.com/deepseek-ai/DeepSeek-R1'}, {'type': 'new_line', 'raw': '\n'}]","Simple Summarization on DeepSeek-R1 from DeepSeek AI + +The RL stage is very important. +↳ However, it is difficult to create a truly helpful AI for people solely through RL. +↳ So, we applied a learning pipeline consisting of four stages: providing a good starting point, reasoning RL, SFT, and safety RL, and achieved performance comparable to o1. +↳ Simply fine-tuning other open models with the data generated by R1-Zero (distillation) resulted in performance comparable to o1-mini. + +Of course, this is just a brief overview and may not be of much help. All models are accessible on Hugging Face, and the paper can be read through the GitHub repository. 
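+Since the summary above highlights the distilled checkpoints, a minimal sketch of trying one of them (the 7B Qwen distill is one of the released sizes; the prompt and generation settings are arbitrary):
+
+```python
+# Minimal sketch: run one of the distilled R1 checkpoints discussed above.
+# Repo id is one of the released distills; generation settings are arbitrary.
+from transformers import pipeline
+
+generate = pipeline(
+    "text-generation",
+    model="deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
+    torch_dtype="auto",
+    device_map="auto",
+)
+messages = [{"role": "user", "content": "What is 17 * 24? Think step by step."}]
+print(generate(messages, max_new_tokens=256)[0]["generated_text"])
+```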
+
+
+Model: https://huggingface.co/deepseek-ai
+Paper: https://github.com/deepseek-ai/DeepSeek-R1
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60d3b57ad7b174177faabd6e/Qg-8A8T0lTis5NC_p2Kup.jpeg'}]",[],"[{'reaction': '👍', 'users': ['chansung', 'theainerd', 'sudanenator', 'tanu360', 'UnstableLlama', 'John6666', 'AtAndDev'], 'count': 7}, {'reaction': '🚀', 'users': ['John6666', 'Ansari123', 'AtAndDev'], 'count': 3}, {'reaction': '🔥', 'users': ['joseEjmendez', 'issaco', 'AtAndDev'], 'count': 3}]",2025-01-21 01:55:06,2025-01-21 05:34:09.765,"[{'_id': '6033c55f60e3dd96631c908d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6033c55f60e3dd96631c908d/jy7cHHCBhnlzHKGbXIbj0.jpeg', 'fullname': 'Shyam Sunder Kumar', 'name': 'theainerd', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 15, 'isFollowing': False}]",/posts/chansung/165424668459034,2092,"{'language': 'en', 'probability': 0.905510425567627}",1
+https://cdn-avatars.huggingface.co/v1/production/uploads/1667002643224-604a5184dca2c7ac7508b849.jpeg,287.0,Ross Wightman,rwightman,238520709760123,"[{'type': 'text', 'value': "I re-worked the JupyterLab Space template recently. It's optimized for ", 'raw': "I re-worked the JupyterLab Space template recently. It's optimized for "}, {'type': 'inline_code', 'code': 'timm', 'raw': '`timm`'}, {'type': 'text', 'value': ' use, but will work great with ', 'raw': ' use, but will work great with '}, {'type': 'inline_code', 'code': 'transformers', 'raw': '`transformers`'}, {'type': 'text', 'value': ' and other libs. Updated the base image, Python 3.12, Pillow-SIMD for better CPU use with image preprocessing, and made a number of other tweaks. From the Jupyter launcher you can run the terminal and setup a ', 'raw': ' and other libs. Updated the base image, Python 3.12, Pillow-SIMD for better CPU use with image preprocessing, and made a number of other tweaks. From the Jupyter launcher you can run the terminal and setup a '}, {'type': 'inline_code', 'code': 'timm', 'raw': '`timm`'}, {'type': 'text', 'value': ' environment in moments with ', 'raw': ' environment in moments with '}, {'type': 'inline_code', 'code': 'setup_timm_dev', 'raw': '`setup_timm_dev`'}, {'type': 'text', 'value': ' or ', 'raw': ' or '}, {'type': 'inline_code', 'code': 'setup_timm_scripts', 'raw': '`setup_timm_scripts`'}, {'type': 'text', 'value': ' helpers. Give it a try, ', 'raw': ' helpers. Give it a try, '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'timm/jupyterlab-timm'}, 'url': 'https://huggingface.co/spaces/timm/jupyterlab-timm', 'raw': 'https://huggingface.co/spaces/timm/jupyterlab-timm'}]","I re-worked the JupyterLab Space template recently. It's optimized for `timm` use, but will work great with `transformers` and other libs. Updated the base image, Python 3.12, Pillow-SIMD for better CPU use with image preprocessing, and made a number of other tweaks. From the Jupyter launcher you can run the terminal and setup a `timm` environment in moments with `setup_timm_dev` or `setup_timm_scripts` helpers. 
Give it a try, https://huggingface.co/spaces/timm/jupyterlab-timm",[],[],"[{'reaction': '👍', 'users': ['John6666', 'Tonic', 'joseph-bou'], 'count': 3}]",2025-01-17 18:31:43,2025-01-17 18:31:43.774,[],/posts/rwightman/238520709760123,1779,"{'language': 'en', 'probability': 0.8244147300720215}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6404403bad54665351d42ee2/TCC5Na8ojtSL1MJAzTn3b.png,38.0,zamal_,zamal,726350795126069,"[{'type': 'resource', 'resource': {'type': 'space', 'id': 'zamal/Multimodal-Chat-PDF'}, 'url': 'https://huggingface.co/spaces/zamal/Multimodal-Chat-PDF', 'raw': 'https://huggingface.co/spaces/zamal/Multimodal-Chat-PDF'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Introducing Chat PDF Multimodal 💬', 'raw': '🚀 Introducing Chat PDF Multimodal 💬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Interact with your PDF documents like never before! 🤯', 'raw': 'Interact with your PDF documents like never before! 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Extract text & images, then ask context-aware questions based on both. Powered by RAG techniques & multimodal LLMs. Perfect for studying, research & more! 📝👀', 'raw': 'Extract text & images, then ask context-aware questions based on both. Powered by RAG techniques & multimodal LLMs. Perfect for studying, research & more! 📝👀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out now!!!! ✍️', 'raw': 'Try it out now!!!! ✍️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#LlavaNext #MultimodalAI #Transformers', 'raw': '#LlavaNext #MultimodalAI #Transformers'}]","https://huggingface.co/spaces/zamal/Multimodal-Chat-PDF + +🚀 Introducing Chat PDF Multimodal 💬 + +Interact with your PDF documents like never before! 🤯 +Extract text & images, then ask context-aware questions based on both. Powered by RAG techniques & multimodal LLMs. Perfect for studying, research & more! 📝👀 +Try it out now!!!! 
✍️ + +#LlavaNext #MultimodalAI #Transformers","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6404403bad54665351d42ee2/6UZLbHgGLeEdVyKW2TOjU.webp'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'zamal'], 'count': 2}, {'reaction': '🤯', 'users': ['HaarisIqubal'], 'count': 1}]",2025-01-17 17:26:59,2025-01-17 17:30:25.464,[],/posts/zamal/726350795126069,1511,"{'language': 'en', 'probability': 0.7915974855422974}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64838b28c235ef76b63e4999/ZhQCYoU3vps71Ag7Jezj6.jpeg,971.0,Ksenia Se,Kseniase,333853610289345,"[{'type': 'text', 'value': 'Today, we spoke with Snowflake’s AI Research Team Leads, Yuxiong He and Samyam Rajbhandari (', 'raw': 'Today, we spoke with Snowflake’s AI Research Team Leads, Yuxiong He and Samyam Rajbhandari ('}, {'type': 'mention', 'user': 'samyam', 'raw': '@samyam'}, {'type': 'text', 'value': ') (he is also one of the researchers behind ', 'raw': ') (he is also one of the researchers behind '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.08671'}, 'url': 'https://huggingface.co/papers/2401.08671', 'raw': 'https://huggingface.co/papers/2401.08671', 'label': 'DeepSpeed-FastGen: High-throughput Text Generation for LLMs via MII and\n DeepSpeed-Inference (2401.08671)'}, {'type': 'text', 'value': ' and other DeepSpeed papers)', 'raw': ' and other DeepSpeed papers)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collaborating with their co-authors to reduce inference costs for enterprise-specific tasks, they observed that inputs are often significantly larger than outputs. This is because it’s in the nature of enterprises to analyze enormous amounts of information trying to extract valuable insights, which are much shorter. To address this, they developed SwiftKV ', 'raw': 'Collaborating with their co-authors to reduce inference costs for enterprise-specific tasks, they observed that inputs are often significantly larger than outputs. This is because it’s in the nature of enterprises to analyze enormous amounts of information trying to extract valuable insights, which are much shorter. To address this, they developed SwiftKV '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2410.03960'}, 'url': 'https://huggingface.co/papers/2410.03960', 'raw': 'https://huggingface.co/papers/2410.03960', 'label': 'SwiftKV: Fast Prefill-Optimized Inference with Knowledge-Preserving\n Model Transformation (2410.03960)'}, {'type': 'text', 'value': ', an optimization that reduces LLM inference costs by up to 75% for Meta Llama LLMs, enhancing efficiency and performance in enterprise AI tasks. ', 'raw': ', an optimization that reduces LLM inference costs by up to 75% for Meta Llama LLMs, enhancing efficiency and performance in enterprise AI tasks. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Today they are open-sourcing SwiftKV (', 'raw': 'Today they are open-sourcing SwiftKV ('}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Snowflake/Llama-3.1-SwiftKV-8B-Instruct'}, 'url': 'https://huggingface.co/Snowflake/Llama-3.1-SwiftKV-8B-Instruct', 'raw': 'https://huggingface.co/Snowflake/Llama-3.1-SwiftKV-8B-Instruct'}, {'type': 'text', 'value': ') and ArcticTraining Platform. ', 'raw': ') and ArcticTraining Platform. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In our new episode ""15 minutes with a Researcher"" they explain how SwiftKV works, its applicability to other architectures, its limitations, and additional methods to further reduce computation costs in inference.', 'raw': 'In our new episode ""15 minutes with a Researcher"" they explain how SwiftKV works, its applicability to other architectures, its limitations, and additional methods to further reduce computation costs in inference.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Watch the full 15 min interview here (', 'raw': 'Watch the full 15 min interview here ('}, {'type': 'link', 'href': 'https://youtu.be/9x1k7eXe-6Q?si=4_HQOyi1CPHgvlrx', 'raw': 'https://youtu.be/9x1k7eXe-6Q?si=4_HQOyi1CPHgvlrx'}, {'type': 'text', 'value': ') ', 'raw': ') '}, {'type': 'new_line', 'raw': '\n'}]","Today, we spoke with Snowflake’s AI Research Team Leads, Yuxiong He and Samyam Rajbhandari (@samyam) (he is also one of the researchers behind https://huggingface.co/papers/2401.08671 and other DeepSpeed papers) + +Collaborating with their co-authors to reduce inference costs for enterprise-specific tasks, they observed that inputs are often significantly larger than outputs. This is because it’s in the nature of enterprises to analyze enormous amounts of information trying to extract valuable insights, which are much shorter. To address this, they developed SwiftKV https://huggingface.co/papers/2410.03960, an optimization that reduces LLM inference costs by up to 75% for Meta Llama LLMs, enhancing efficiency and performance in enterprise AI tasks. + +Today they are open-sourcing SwiftKV (https://huggingface.co/Snowflake/Llama-3.1-SwiftKV-8B-Instruct) and ArcticTraining Platform. +In our new episode ""15 minutes with a Researcher"" they explain how SwiftKV works, its applicability to other architectures, its limitations, and additional methods to further reduce computation costs in inference.
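The input-heavy pattern described above is easy to make concrete with back-of-the-envelope arithmetic. The sketch below uses assumed token counts and the rough idea behind SwiftKV (cutting prefill compute roughly in half); none of these numbers come from the paper:

```python
# Why prefill dominates enterprise workloads: per-token cost scales with
# the number of transformer layers, and prompts dwarf completions.
prompt_tokens, output_tokens = 8_000, 500  # assumed input-heavy workload
layers = 32                                # e.g. a Llama-8B-class model

baseline = (prompt_tokens + output_tokens) * layers
# SwiftKV-style saving: prefill runs through roughly half the layers,
# reusing an earlier layer's states to fill the KV cache for the rest.
optimized = prompt_tokens * (layers // 2) + output_tokens * layers

print(f"prefill share of baseline cost: {prompt_tokens / (prompt_tokens + output_tokens):.0%}")
print(f"estimated compute saved: {1 - optimized / baseline:.0%}")
```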
+Watch the full 15 min interview here (https://youtu.be/9x1k7eXe-6Q?si=4_HQOyi1CPHgvlrx) +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64838b28c235ef76b63e4999/FL1lcvZNcL39MJ39KhzzQ.qt'}]","[{'_id': '63238e79d444e1d90888ea03', 'avatarUrl': '/avatars/cd0accb5ddec4f2ae23c94da4094b5e5.svg', 'fullname': 'Samyam Rajbhandari', 'name': 'samyam', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}]","[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-01-17 16:54:27,2025-01-17 16:55:04.493,[],/posts/Kseniase/333853610289345,768,"{'language': 'en', 'probability': 0.9121742844581604}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620a77b7dbba8fc1fbb8bdb4/ZRW2pH9Iawj700OyLpJl8.png,158.0,Florent Gbelidji,florentgbelidji,768721082554371,"[{'type': 'text', 'value': '𝗣𝗹𝗮𝗻𝗻𝗶𝗻𝗴 𝗬𝗼𝘂𝗿 𝗡𝗲𝘅𝘁 𝗦𝗸𝗶 𝗔𝗱𝘃𝗲𝗻𝘁𝘂𝗿𝗲 𝗝𝘂𝘀𝘁 𝗚𝗼𝘁 𝗦𝗺𝗮𝗿𝘁𝗲𝗿: 𝗜𝗻𝘁𝗿𝗼𝗱𝘂𝗰𝗶𝗻𝗴 𝗔𝗹𝗽𝗶𝗻𝗲 𝗔𝗴𝗲𝗻𝘁!🏔️⛷️', 'raw': '𝗣𝗹𝗮𝗻𝗻𝗶𝗻𝗴 𝗬𝗼𝘂𝗿 𝗡𝗲𝘅𝘁 𝗦𝗸𝗶 𝗔𝗱𝘃𝗲𝗻𝘁𝘂𝗿𝗲 𝗝𝘂𝘀𝘁 𝗚𝗼𝘁 𝗦𝗺𝗮𝗿𝘁𝗲𝗿: 𝗜𝗻𝘁𝗿𝗼𝗱𝘂𝗰𝗶𝗻𝗴 𝗔𝗹𝗽𝗶𝗻𝗲 𝗔𝗴𝗲𝗻𝘁!🏔️⛷️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With the big hype around AI agents these days, I couldn’t stop thinking about how AI agents could truly enhance real-world activities.', 'raw': 'With the big hype around AI agents these days, I couldn’t stop thinking about how AI agents could truly enhance real-world activities.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What sort of applications could we build with those AI agents: agentic RAG? self-correcting text-to-sql? Nah, boring…', 'raw': 'What sort of applications could we build with those AI agents: agentic RAG? self-correcting text-to-sql? Nah, boring…'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Passionate about outdoors, I’ve always dreamed of a tool that could simplify planning mountain trips while accounting for all potential risks. That’s why I built 𝗔𝗹𝗽𝗶𝗻𝗲 𝗔𝗴𝗲𝗻𝘁, a smart assistant designed to help you plan safe and enjoyable itineraries in the French Alps and Pyrenees.', 'raw': 'Passionate about outdoors, I’ve always dreamed of a tool that could simplify planning mountain trips while accounting for all potential risks. That’s why I built 𝗔𝗹𝗽𝗶𝗻𝗲 𝗔𝗴𝗲𝗻𝘁, a smart assistant designed to help you plan safe and enjoyable itineraries in the French Alps and Pyrenees.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Built using Hugging Face's 𝘀𝗺𝗼𝗹𝗮𝗴𝗲𝗻𝘁𝘀 library, Alpine Agent combines the power of AI with trusted resources like 𝘚𝘬𝘪𝘵𝘰𝘶𝘳.𝘧𝘳 ("", 'raw': ""Built using Hugging Face's 𝘀𝗺𝗼𝗹𝗮𝗴𝗲𝗻𝘁𝘀 library, Alpine Agent combines the power of AI with trusted resources like 𝘚𝘬𝘪𝘵𝘰𝘶𝘳.𝘧𝘳 (""}, {'type': 'link', 'href': 'https://skitour.fr/', 'raw': 'https://skitour.fr/'}, {'type': 'text', 'value': ') and METEO FRANCE. Whether it’s suggesting a route with moderate difficulty or analyzing avalanche risks and weather conditions, this agent dynamically integrates data to deliver personalized recommendations.', 'raw': ') and METEO FRANCE. 
Whether it’s suggesting a route with moderate difficulty or analyzing avalanche risks and weather conditions, this agent dynamically integrates data to deliver personalized recommendations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In my latest blog post, I share how I developed this project—from defining tools and integrating APIs to selecting the best LLMs like 𝘘𝘸𝘦𝘯2.5-𝘊𝘰𝘥𝘦𝘳-32𝘉-𝘐𝘯𝘴𝘵𝘳𝘶𝘤𝘵, 𝘓𝘭𝘢𝘮𝘢-3.3-70𝘉-𝘐𝘯𝘴𝘵𝘳𝘶𝘤𝘵, or 𝘎𝘗𝘛-4.', 'raw': 'In my latest blog post, I share how I developed this project—from defining tools and integrating APIs to selecting the best LLMs like 𝘘𝘸𝘦𝘯2.5-𝘊𝘰𝘥𝘦𝘳-32𝘉-𝘐𝘯𝘴𝘵𝘳𝘶𝘤𝘵, 𝘓𝘭𝘢𝘮𝘢-3.3-70𝘉-𝘐𝘯𝘴𝘵𝘳𝘶𝘤𝘵, or 𝘎𝘗𝘛-4.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⛷️ Curious how AI can enhance adventure planning?\u2028Try the app and share your thoughts: ', 'raw': '⛷️ Curious how AI can enhance adventure planning?\u2028Try the app and share your thoughts: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'florentgbelidji/alpine-agent'}, 'url': 'https://huggingface.co/spaces/florentgbelidji/alpine-agent', 'raw': 'https://huggingface.co/spaces/florentgbelidji/alpine-agent'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Want to build your own agents? Whether for cooking, sports training, or other passions, the possibilities are endless. Check out the blog post to learn more: ', 'raw': '👉 Want to build your own agents? Whether for cooking, sports training, or other passions, the possibilities are endless. Check out the blog post to learn more: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/florentgbelidji/alpine-agent', 'raw': 'https://huggingface.co/blog/florentgbelidji/alpine-agent'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Many thanks to ', 'raw': 'Many thanks to '}, {'type': 'mention', 'user': 'm-ric', 'raw': '@m-ric'}, {'type': 'text', 'value': ' for helping on building this tool with smolagents!', 'raw': ' for helping on building this tool with smolagents!'}, {'type': 'new_line', 'raw': '\n'}]","𝗣𝗹𝗮𝗻𝗻𝗶𝗻𝗴 𝗬𝗼𝘂𝗿 𝗡𝗲𝘅𝘁 𝗦𝗸𝗶 𝗔𝗱𝘃𝗲𝗻𝘁𝘂𝗿𝗲 𝗝𝘂𝘀𝘁 𝗚𝗼𝘁 𝗦𝗺𝗮𝗿𝘁𝗲𝗿: 𝗜𝗻𝘁𝗿𝗼𝗱𝘂𝗰𝗶𝗻𝗴 𝗔𝗹𝗽𝗶𝗻𝗲 𝗔𝗴𝗲𝗻𝘁!🏔️⛷️ + +With the big hype around AI agents these days, I couldn’t stop thinking about how AI agents could truly enhance real-world activities. +What sort of applications could we build with those AI agents: agentic RAG? self-correcting text-to-sql? Nah, boring… + +Passionate about outdoors, I’ve always dreamed of a tool that could simplify planning mountain trips while accounting for all potential risks. That’s why I built 𝗔𝗹𝗽𝗶𝗻𝗲 𝗔𝗴𝗲𝗻𝘁, a smart assistant designed to help you plan safe and enjoyable itineraries in the French Alps and Pyrenees. + +Built using Hugging Face's 𝘀𝗺𝗼𝗹𝗮𝗴𝗲𝗻𝘁𝘀 library, Alpine Agent combines the power of AI with trusted resources like 𝘚𝘬𝘪𝘵𝘰𝘶𝘳.𝘧𝘳 (https://skitour.fr/) and METEO FRANCE. Whether it’s suggesting a route with moderate difficulty or analyzing avalanche risks and weather conditions, this agent dynamically integrates data to deliver personalized recommendations. + +In my latest blog post, I share how I developed this project—from defining tools and integrating APIs to selecting the best LLMs like 𝘘𝘸𝘦𝘯2.5-𝘊𝘰𝘥𝘦𝘳-32𝘉-𝘐𝘯𝘴𝘵𝘳𝘶𝘤𝘵, 𝘓𝘭𝘢𝘮𝘢-3.3-70𝘉-𝘐𝘯𝘴𝘵𝘳𝘶𝘤𝘵, or 𝘎𝘗𝘛-4. 
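For readers wondering what the smolagents pattern looks like in code, here is a minimal sketch — the snow-report tool and its hardcoded return value are invented for illustration (the real app queries skitour.fr and METEO FRANCE), and serving the model through the HF Inference API is assumed:

```python
from smolagents import CodeAgent, HfApiModel, tool

@tool
def get_snow_conditions(massif: str) -> str:
    """Return a snow and avalanche report for a mountain massif.

    Args:
        massif: Name of the massif, e.g. "Chamonix".
    """
    # Hardcoded stand-in; the real tool would call METEO FRANCE / skitour.fr.
    return f"{massif}: 40cm fresh snow above 2000m, avalanche risk 3/5."

model = HfApiModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct")
agent = CodeAgent(tools=[get_snow_conditions], model=model)
print(agent.run("Suggest a moderate ski tour near Chamonix given current conditions."))
```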
+ +⛷️ Curious how AI can enhance adventure planning?
Try the app and share your thoughts: https://huggingface.co/spaces/florentgbelidji/alpine-agent + +👉 Want to build your own agents? Whether for cooking, sports training, or other passions, the possibilities are endless. Check out the blog post to learn more: https://huggingface.co/blog/florentgbelidji/alpine-agent + +Many thanks to @m-ric for helping on building this tool with smolagents! +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620a77b7dbba8fc1fbb8bdb4/amSosxjR5NBhD0zQOWgi8.mp4'}]","[{'_id': '63d10d4e8eaa4831005e92b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg', 'fullname': 'Aymeric Roucher', 'name': 'm-ric', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1652}]","[{'reaction': '🔥', 'users': ['m-ric', 'John6666', 'roland0822', 'theainerd', 'AtAndDev', 'julien-c'], 'count': 6}]",2025-01-17 16:31:59,2025-01-19 17:18:17.052,"[{'_id': '67861504db9a0895b171413a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/bVcVYeEGJLan9MIU8_n95.png', 'fullname': 'Mehta', 'name': 'mehtarohan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/florentgbelidji/768721082554371,1681,"{'language': 'en', 'probability': 0.8405038118362427}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,588725362884904,"[{'type': 'text', 'value': 'Most Powerful Vision Model CogVLM 2 now works amazing on Windows with new Triton pre-compiled wheels - 19 Examples - Locally tested with 4-bit quantization - Second example is really wild - Can be used for image captioning or any image vision task', 'raw': 'Most Powerful Vision Model CogVLM 2 now works amazing on Windows with new Triton pre-compiled wheels - 19 Examples - Locally tested with 4-bit quantization - Second example is really wild - Can be used for image captioning or any image vision task'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The APP and the installers : ', 'raw': 'The APP and the installers : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/120193330', 'raw': 'https://www.patreon.com/posts/120193330'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check below screenshots to see how to use it', 'raw': 'Check below screenshots to see how to use it'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Currently the APP works amazing with 4-bit quantization very fast', 'raw': 'Currently the APP works amazing with 4-bit quantization very fast'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I am searching to lower VRAM usage even further with like adding CPU-Offloading and other stuff if possible', 'raw': 'I am searching to lower VRAM usage even further with like adding CPU-Offloading and other stuff if possible'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Previously we were lacking Triton but it now works perfect', 'raw': 'Previously we were lacking Triton but it now works perfect'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My installer installs into a 
Python 3.10 VENV completely isolated and clean', 'raw': 'My installer installs into a Python 3.10 VENV completely isolated and clean'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can see the entire APP and installer source code', 'raw': 'You can see the entire APP and installer source code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you get a Triton error make sure to delete your Triton cache after installing the app like below', 'raw': 'If you get a Triton error make sure to delete your Triton cache after installing the app like below'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'C:\\Users\\Furkan\\.triton', 'raw': 'C:\\Users\\Furkan\\.triton'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hugging Face repo with sample code : ', 'raw': 'Hugging Face repo with sample code : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'THUDM/cogvlm2-llama3-chat-19B'}, 'url': 'https://huggingface.co/THUDM/cogvlm2-llama3-chat-19B', 'raw': 'https://huggingface.co/THUDM/cogvlm2-llama3-chat-19B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub repo : ', 'raw': 'GitHub repo : '}, {'type': 'link', 'href': 'https://github.com/THUDM/CogVLM2', 'raw': 'https://github.com/THUDM/CogVLM2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Triton Windows : ', 'raw': 'Triton Windows : '}, {'type': 'link', 'href': 'https://github.com/woct0rdho/triton-windows/releases', 'raw': 'https://github.com/woct0rdho/triton-windows/releases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Most Powerful Vision Model CogVLM 2 now works amazing on Windows with new Triton pre-compiled wheels - 19 Examples - Locally tested with 4-bit quantization - Second example is really wild - Can be used for image captioning or any image vision task + +The APP and the installers : https://www.patreon.com/posts/120193330 + +Check below screenshots to see how to use it + +Currently the APP works amazing with 4-bit quantization very fast + +I am searching to lower VRAM usage even further with like adding CPU-Offloading and other stuff if possible + +Previously we were lacking Triton but it now works perfect + +My installer installs into a Python 3.10 VENV completely isolated and clean + +You can see the entire APP and installer source code + +If you get a Triton error make sure to delete your Triton cache after installing the app like below + +C:\Users\Furkan\.triton + +Hugging Face repo with sample code : https://huggingface.co/THUDM/cogvlm2-llama3-chat-19B + +GitHub repo : 
https://github.com/THUDM/CogVLM2 + +Triton Windows : https://github.com/woct0rdho/triton-windows/releases + + + + + + + + + + + + + + + + + + + + + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/8WNReacxTUjz1FM1_24lk.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/CK42weG5uJNnAy1CvW1T4.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/LQH_t4dhDhQD2Q3Oqj1C2.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/pWv23PFMGgKkzMEPy1JNw.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/v9l8gH2BrI-u1k77lwoQD.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/V9z_aitDVlEFawD-hIs6o.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/q25uQUQevALIVjFPrHt83.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/50CpLb3woVxDEIG_ceKEK.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Dg2SMKtxw2oRy4oYQgdQ3.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/TcPldfRNnXT573vi1gjoo.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/-kbcJx1VsDc7PZC7H6Gpp.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/CKmPw5Uj8vajhqAZNMq4G.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Pp5giQey-VJBn1VElqtkZ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/6mLZkCfdL8DCYsj4j8_sH.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/qxZ6Pbk6rbT7ymfoG-Ij2.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/D5AKn_haD2rRkAI5C3koA.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/zHwmyYVSv8Hcrr1x0Eb80.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/SaGk1q1BsA9wx4uwbPq33.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/W0vDVwZln2AHdfRYs8L2H.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/gOfuF0QK3zBuaH3Idq1lE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Gbper6_6uWQAL58AmzbrF.png'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'roland0822', 'introvoyz041', 'AtAndDev'], 'count': 4}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '❤️', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 
1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-01-17 16:01:58,2025-01-17 16:01:58.327,[],/posts/MonsterMMORPG/588725362884904,1346,"{'language': 'en', 'probability': 0.8356366157531738}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1652114014657-5edc0500968f6028e0559ff8.jpeg,52.0,Jack Morris,jxm,666693214502175,"[{'type': 'text', 'value': ' New state-of-the-art BERT-size retrieval model: *cde-small-v2* 🥳🍾', 'raw': ' New state-of-the-art BERT-size retrieval model: *cde-small-v2* 🥳🍾'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hi everyone! We at Cornell are releasing a new retrieval model this week. It uses the contextual embeddings framework, is based on the ModernBERT backbone, and gets state-of-the-art results on the MTEB benchmark for its model size (140M parameters). cde-small-v2 gets an average score of 65.6 across the 56 datasets and sees improvements from our previous model in *every* task domain (retrieval, classification, etc.).', 'raw': 'Hi everyone! We at Cornell are releasing a new retrieval model this week. It uses the contextual embeddings framework, is based on the ModernBERT backbone, and gets state-of-the-art results on the MTEB benchmark for its model size (140M parameters). cde-small-v2 gets an average score of 65.6 across the 56 datasets and sees improvements from our previous model in *every* task domain (retrieval, classification, etc.).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We made a lot of changes to make this model work. First of all, ModernBERT has a better tokenizer, which probably helped this work out-of-the-box. We also followed the principles from the CDE paper and used harder clusters and better hard-negative filtering, which showed a small performance improvement. And we made a few small changes that have been shown to work on the larger models: we disabled weight decay, masked out the prefix tokens during pooling, and added a residual connection from the first-stage to the second-stage for better gradient flow.', 'raw': 'We made a lot of changes to make this model work. First of all, ModernBERT has a better tokenizer, which probably helped this work out-of-the-box. We also followed the principles from the CDE paper and used harder clusters and better hard-negative filtering, which showed a small performance improvement. And we made a few small changes that have been shown to work on the larger models: we disabled weight decay, masked out the prefix tokens during pooling, and added a residual connection from the first-stage to the second-stage for better gradient flow.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're still looking for a compute sponsor to help us scale CDE to larger models. Since it's now state-of-the-art at the 100M parameter scale, it seems to be a reasonable bet that we could train a state-of-the-art large model if we had the GPUs. If you're interested in helping with this, please reach out!"", 'raw': ""We're still looking for a compute sponsor to help us scale CDE to larger models. Since it's now state-of-the-art at the 100M parameter scale, it seems to be a reasonable bet that we could train a state-of-the-art large model if we had the GPUs. 
If you're interested in helping with this, please reach out!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's a link to the model: "", 'raw': ""Here's a link to the model: ""}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'jxm/cde-small-v2'}, 'url': 'https://huggingface.co/jxm/cde-small-v2', 'raw': 'https://huggingface.co/jxm/cde-small-v2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""And here's a link to the paper: "", 'raw': ""And here's a link to the paper: ""}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2410.02525'}, 'url': 'https://huggingface.co/papers/2410.02525', 'raw': 'https://huggingface.co/papers/2410.02525', 'label': 'Contextual Document Embeddings (2410.02525)'}, {'type': 'new_line', 'raw': '\n'}]"," New state-of-the-art BERT-size retrieval model: *cde-small-v2* 🥳🍾 + +Hi everyone! We at Cornell are releasing a new retrieval model this week. It uses the contextual embeddings framework, is based on the ModernBERT backbone, and gets state-of-the-art results on the MTEB benchmark for its model size (140M parameters). cde-small-v2 gets an average score of 65.6 across the 56 datasets and sees improvements from our previous model in *every* task domain (retrieval, classification, etc.). + +We made a lot of changes to make this model work. First of all, ModernBERT has a better tokenizer, which probably helped this work out-of-the-box. We also followed the principles from the CDE paper and used harder clusters and better hard-negative filtering, which showed a small performance improvement. And we made a few small changes that have been shown to work on the larger models: we disabled weight decay, masked out the prefix tokens during pooling, and added a residual connection from the first-stage to the second-stage for better gradient flow. + +We're still looking for a compute sponsor to help us scale CDE to larger models. Since it's now state-of-the-art at the 100M parameter scale, it seems to be a reasonable bet that we could train a state-of-the-art large model if we had the GPUs. If you're interested in helping with this, please reach out!
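Of the tweaks listed above, masking prefix tokens out of pooling is easy to sketch generically. This is not the cde-small-v2 training code — just the idea in plain PyTorch, with the number of prefix tokens as an assumed placeholder:

```python
import torch

def masked_mean_pool(hidden: torch.Tensor, attention_mask: torch.Tensor, n_prefix: int) -> torch.Tensor:
    """Mean-pool token embeddings, ignoring padding and the first n_prefix tokens."""
    mask = attention_mask.float().clone()        # (batch, seq_len); 1 = real token
    mask[:, :n_prefix] = 0.0                     # drop prefix/instruction tokens
    summed = (hidden * mask.unsqueeze(-1)).sum(dim=1)
    return summed / mask.sum(dim=1, keepdim=True).clamp(min=1.0)

hidden = torch.randn(2, 10, 768)                 # dummy transformer outputs
attention_mask = torch.ones(2, 10)
print(masked_mean_pool(hidden, attention_mask, n_prefix=2).shape)  # torch.Size([2, 768])
```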
 + +Here's a link to the model: https://huggingface.co/jxm/cde-small-v2 +And here's a link to the paper: https://huggingface.co/papers/2410.02525 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5edc0500968f6028e0559ff8/n6G0fGQk8I0DwgklIvmzA.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5edc0500968f6028e0559ff8/9UHUWRR8FXhUSHa7gx4Yz.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'TamThai'], 'count': 2}, {'reaction': '❤️', 'users': ['permutans', 'ajankelo'], 'count': 2}]",2025-01-17 15:47:22,2025-01-17 15:51:59.400,[],/posts/jxm/666693214502175,706,"{'language': 'en', 'probability': 0.9281346797943115}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,939754790816678,"[{'type': 'text', 'value': 'Everything that happened this week in open AI, a recap 🤠 ', 'raw': 'Everything that happened this week in open AI, a recap 🤠 '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/jan-17-releases-678a673a9de4a4675f215bf5'}, 'url': 'https://huggingface.co/collections/merve/jan-17-releases-678a673a9de4a4675f215bf5', 'raw': 'https://huggingface.co/collections/merve/jan-17-releases-678a673a9de4a4675f215bf5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👀 Multimodal', 'raw': '👀 Multimodal'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MiniCPM-o 2.6 is a new sota any-to-any model by OpenBMB', 'raw': '- MiniCPM-o 2.6 is a new sota any-to-any model by OpenBMB'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' (vision, speech and text!)', 'raw': ' (vision, speech and text!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- VideoChat-Flash-Qwen2.5-2B is one of the new video multimodal models by OpenGVLab that come in sizes 2B & 7B in resolutions 224 & 448', 'raw': '- VideoChat-Flash-Qwen2.5-2B is one of the new video multimodal models by OpenGVLab that come in sizes 2B & 7B in resolutions 224 & 448'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ByteDance released larger SA2VA that comes in 26B parameters', 'raw': '- ByteDance released larger SA2VA that comes in 26B parameters'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dataset: VRC-Bench is a new diverse benchmark for multimodal LLM reasoning performance', 'raw': '- Dataset: VRC-Bench is a new diverse benchmark for multimodal LLM reasoning performance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💬 LLMs', 'raw': '💬 LLMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MiniMax-Text-01 is a new huge language model (456B total, 45.9B active params) by MiniMaxAI with context length of 4M tokens 🤯', 'raw': '- MiniMax-Text-01 is a new huge language model (456B total, 45.9B active params) by MiniMaxAI with context length of 4M tokens 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dataset: Sky-T1-data-17k is a diverse dataset used to train Sky-T1-32B', 'raw': '- Dataset: Sky-T1-data-17k is a diverse dataset used to train Sky-T1-32B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- kyutai released Helium-1-Preview-2B, a new small multilingual LM', 'raw': '- kyutai released Helium-1-Preview-2B, a new small multilingual LM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 
'- Wayfarer-12B is a new LLM able to write D&D 🧙🏻\u200d♂️', 'raw': '- Wayfarer-12B is a new LLM able to write D&D 🧙🏻\u200d♂️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ReaderLM-v2 is a new HTML parsing model by Jina AI', 'raw': '- ReaderLM-v2 is a new HTML parsing model by Jina AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dria released Dria-Agent-a-3B, a new agentic coding model (Pythonic function calling) based on Qwen2.5 Coder', 'raw': '- Dria released Dria-Agent-a-3B, a new agentic coding model (Pythonic function calling) based on Qwen2.5 Coder'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Unsloth released Phi-4, faster and memory-efficient Llama 3.3', 'raw': '- Unsloth released Phi-4, faster and memory-efficient Llama 3.3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖼️ Vision', 'raw': '🖼️ Vision'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MatchAnything is a new foundation model for matching', 'raw': '- MatchAnything is a new foundation model for matching'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- FitDiT is a high-fidelity VTON model based on DiT architecture', 'raw': '- FitDiT is a high-fidelity VTON model based on DiT architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗣️ Audio', 'raw': '🗣️ Audio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- OuteTTS-0.3-1B is a new multilingual text-to-speech model with voice cloning and emotion control capabilities', 'raw': '- OuteTTS-0.3-1B is a new multilingual text-to-speech model with voice cloning and emotion control capabilities'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 Retrieval', 'raw': '📖 Retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- lightblue released a new reranker based on Qwen2.5 LB-reranker-0.5B-v1.0 that can handle 95+ languages', 'raw': '- lightblue released a new reranker based on Qwen2.5 LB-reranker-0.5B-v1.0 that can handle 95+ languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- cde-small-v2 is a new sota small retrieval model by ', 'raw': '- cde-small-v2 is a new sota small retrieval model by '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'jxm', 'raw': '@jxm'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Everything that happened this week in open AI, a recap 🤠 https://huggingface.co/collections/merve/jan-17-releases-678a673a9de4a4675f215bf5 + +👀 Multimodal +- MiniCPM-o 2.6 is a new sota any-to-any model by OpenBMB + (vision, speech and text!)
 +- VideoChat-Flash-Qwen2.5-2B is one of the new video multimodal models by OpenGVLab that come in sizes 2B & 7B in resolutions 224 & 448 +- ByteDance released larger SA2VA that comes in 26B parameters +- Dataset: VRC-Bench is a new diverse benchmark for multimodal LLM reasoning performance + +💬 LLMs +- MiniMax-Text-01 is a new huge language model (456B total, 45.9B active params) by MiniMaxAI with context length of 4M tokens 🤯 +- Dataset: Sky-T1-data-17k is a diverse dataset used to train Sky-T1-32B +- kyutai released Helium-1-Preview-2B, a new small multilingual LM +- Wayfarer-12B is a new LLM able to write D&D 🧙🏻‍♂️ +- ReaderLM-v2 is a new HTML parsing model by Jina AI + +- Dria released Dria-Agent-a-3B, a new agentic coding model (Pythonic function calling) based on Qwen2.5 Coder +- Unsloth released Phi-4, faster and memory-efficient Llama 3.3 + +🖼️ Vision +- MatchAnything is a new foundation model for matching +- FitDiT is a high-fidelity VTON model based on DiT architecture + +🗣️ Audio +- OuteTTS-0.3-1B is a new multilingual text-to-speech model with voice cloning and emotion control capabilities + +📖 Retrieval +- lightblue released a new reranker based on Qwen2.5 LB-reranker-0.5B-v1.0 that can handle 95+ languages +- cde-small-v2 is a new sota small retrieval model by +@jxm ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/vjiSkBhYuLcNqNhDC0BvA.png'}]","[{'_id': '5edc0500968f6028e0559ff8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1652114014657-5edc0500968f6028e0559ff8.jpeg', 'fullname': 'Jack Morris', 'name': 'jxm', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 52}]","[{'reaction': '❤️', 'users': ['Dref360', 'pagezyhf', 's-emanuilov', 'TheDrunkenSnail', 'John6666', 'fdaudens', 'Cuiunbo', 'yogeshangajala', 'samjtro', 'travisking', 'prithivMLmods', 'benjamin-paine', 'ravi4198', 'AtAndDev'], 'count': 14}, {'reaction': '🔥', 'users': ['yogeshangajala', 'AtAndDev', 'm8x7b'], 'count': 3}, {'reaction': '🤗', 'users': ['Cuiunbo', 'AtAndDev'], 'count': 2}]",2025-01-17 15:22:15,2025-01-17 15:22:15.464,[],/posts/merve/939754790816678,2644,"{'language': 'en', 'probability': 0.8246466517448425}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg,2185.0,Hafedh Hichri,not-lain,973655794967929,"[{'type': 'text', 'value': 'we now have more than 2000 public AI models using ModelHubMixin🤗', 'raw': 'we now have more than 2000 public AI models using ModelHubMixin🤗'}]",we now have more than 2000 public AI models using ModelHubMixin🤗,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/bJJQNc8-5dD7FHuKwXZN7.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'chemouda', 'xi0v', 'p3nGu1nZz'], 'count': 4}, {'reaction': '🤗', 'users': ['John6666', 'p3nGu1nZz'], 'count': 2}, {'reaction': '👍', 'users': ['ZhengPeng7', 'p3nGu1nZz'], 'count': 2}]",2025-01-17 14:04:00,2025-01-17 14:04:00.378,[],/posts/not-lain/973655794967929,1778,"{'language': 'en', 'probability': 0.87556391954422}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,585507841186093,"[{'type': 'text', 'value': 'You can now use the ""Synthetic Data Generator"" at a much larger scale with your preferred inference engine: Ollama, vLLM, TGI, and serverless inference! 
🔥', 'raw': 'You can now use the ""Synthetic Data Generator"" at a much larger scale with your preferred inference engine: Ollama, vLLM, TGI, and serverless inference! 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Install, configure, launch!', 'raw': 'Install, configure, launch!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Space: ', 'raw': 'Space: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/argilla/synthetic-data-generator?duplicate=true', 'raw': 'https://huggingface.co/spaces/argilla/synthetic-data-generator?duplicate=true'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Examples: ', 'raw': 'Examples: '}, {'type': 'link', 'href': 'https://github.com/argilla-io/synthetic-data-generator/tree/main/examples', 'raw': 'https://github.com/argilla-io/synthetic-data-generator/tree/main/examples'}]","You can now use the ""Synthetic Data Generator"" at a much larger scale with your preferred inference engine: Ollama, vLLM, TGI, and serverless inference! 🔥 + +Install, configure, launch! + +Space: https://huggingface.co/spaces/argilla/synthetic-data-generator?duplicate=true +Examples: https://github.com/argilla-io/synthetic-data-generator/tree/main/examples",[],[],"[{'reaction': '👀', 'users': ['AkimfromParis', 'John6666', 'djuna', 'dvilasuero'], 'count': 4}, {'reaction': '❤️', 'users': ['dvilasuero'], 'count': 1}, {'reaction': '🔥', 'users': ['dvilasuero'], 'count': 1}]",2025-01-17 11:19:08,2025-01-17 11:19:08.534,[],/posts/davidberenstein1957/585507841186093,1268,"{'language': 'en', 'probability': 0.6727671027183533}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63f7888abd28622c9b9a0b80/5t6JU_Cm7yFYTRUGr9eqH.jpeg,78.0,Natalia Elvira,nataliaElv,155128777456813,"[{'type': 'text', 'value': 'New chapter in the Hugging Face NLP course! 🤗 🚀 ', 'raw': 'New chapter in the Hugging Face NLP course! 🤗 🚀 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We've added a new chapter about the very basics of Argilla to the Hugging Face NLP course. Learn how to set up an Argilla instance, load & annotate datasets, and export them to the Hub.\xa0"", 'raw': ""We've added a new chapter about the very basics of Argilla to the Hugging Face NLP course. Learn how to set up an Argilla instance, load & annotate datasets, and export them to the Hub.\xa0""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Any feedback for improvements welcome!', 'raw': 'Any feedback for improvements welcome!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/learn/nlp-course/chapter10', 'raw': 'https://huggingface.co/learn/nlp-course/chapter10'}]","New chapter in the Hugging Face NLP course! 🤗 🚀 + +We've added a new chapter about the very basics of Argilla to the Hugging Face NLP course. Learn how to set up an Argilla instance, load & annotate datasets, and export them to the Hub.  + +Any feedback for improvements welcome! 
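A condensed sketch of the workflow that chapter walks through, written against the Argilla 2.x Python SDK as best I recall it — the URL, API key, labels, and repo id are all placeholders, so treat this as a sketch rather than the course's exact code:

```python
import argilla as rg

# Connect to a running Argilla instance (e.g. an Argilla Space); values are placeholders.
client = rg.Argilla(api_url="https://my-argilla.hf.space", api_key="owner.apikey")

settings = rg.Settings(
    fields=[rg.TextField(name="text")],
    questions=[rg.LabelQuestion(name="sentiment", labels=["positive", "negative"])],
)
dataset = rg.Dataset(name="course_demo", settings=settings, client=client)
dataset.create()
dataset.records.log([{"text": "I love this course!"}])  # load data to annotate in the UI

# After annotating, export the dataset to the Hugging Face Hub.
dataset.to_hub(repo_id="my-username/course-demo-annotations")
```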
+ +https://huggingface.co/learn/nlp-course/chapter10",[],[],"[{'reaction': '❤️', 'users': ['dvilasuero', 'gabrielmbmb', 'gsarti', 'John6666', 'AdinaY', 'Richardnicholas439', 'yhz09'], 'count': 7}, {'reaction': '🔥', 'users': ['dvilasuero', 'gabrielmbmb', 'John6666'], 'count': 3}, {'reaction': '🤗', 'users': ['John6666'], 'count': 1}]",2025-01-17 09:48:20,2025-01-17 09:48:20.641,[],/posts/nataliaElv/155128777456813,1526,"{'language': 'en', 'probability': 0.8166660666465759}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,242926717708998,"[{'type': 'text', 'value': '🔥 The AI Agent hype is real! This blog post deep dives into everything you need to know before deploying them: from key definitions to practical recommendations. A must-read for anyone building the future of autonomous systems.', 'raw': '🔥 The AI Agent hype is real! This blog post deep dives into everything you need to know before deploying them: from key definitions to practical recommendations. A must-read for anyone building the future of autonomous systems.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Key insight: A clear table breaking down the 5 levels of AI agents - from simple processors to fully autonomous systems. Essential framework for understanding where your agent stands on the autonomy spectrum', 'raw': '📊 Key insight: A clear table breaking down the 5 levels of AI agents - from simple processors to fully autonomous systems. Essential framework for understanding where your agent stands on the autonomy spectrum'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚖️ Deep analysis of 15 core values reveals critical trade-offs: accuracy, privacy, safety, equity & more. The same features that make agents powerful can make them risky. Understanding these trade-offs is crucial for responsible deployment', 'raw': '⚖️ Deep analysis of 15 core values reveals critical trade-offs: accuracy, privacy, safety, equity & more. The same features that make agents powerful can make them risky. 
Understanding these trade-offs is crucial for responsible deployment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 6 key recommendations for the road ahead:', 'raw': '🎯 6 key recommendations for the road ahead:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Create rigorous evaluation protocols', 'raw': '- Create rigorous evaluation protocols'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Study societal effects', 'raw': '- Study societal effects'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Understand ripple effects', 'raw': '- Understand ripple effects'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Improve transparency', 'raw': '- Improve transparency'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Open source can make a positive difference ', 'raw': '- Open source can make a positive difference '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Monitor base model evolution', 'raw': '- Monitor base model evolution'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the blog post: ', 'raw': 'Read the blog post: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/ethics-soc-7', 'raw': 'https://huggingface.co/blog/ethics-soc-7'}, {'type': 'text', 'value': ' Brilliant work by ', 'raw': ' Brilliant work by '}, {'type': 'mention', 'user': 'meg', 'raw': '@meg'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'evijit', 'raw': '@evijit'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'sasha', 'raw': '@sasha'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'giadap', 'raw': '@giadap'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","🔥 The AI Agent hype is real! This blog post deep dives into everything you need to know before deploying them: from key definitions to practical recommendations. A must-read for anyone building the future of autonomous systems. + +📊 Key insight: A clear table breaking down the 5 levels of AI agents - from simple processors to fully autonomous systems. Essential framework for understanding where your agent stands on the autonomy spectrum + +⚖️ Deep analysis of 15 core values reveals critical trade-offs: accuracy, privacy, safety, equity & more. The same features that make agents powerful can make them risky. 
Understanding these trade-offs is crucial for responsible deployment + +🎯 6 key recommendations for the road ahead: +- Create rigorous evaluation protocols +- Study societal effects +- Understand ripple effects +- Improve transparency +- Open source can make a positive difference +- Monitor base model evolution + +Read the blog post: https://huggingface.co/blog/ethics-soc-7 Brilliant work by @meg @evijit @sasha @giadap ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/CoXL-B9cmV6hMwFVPfdHH.jpeg'}]","[{'_id': '6413251362e6057cbb6259bd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6413251362e6057cbb6259bd/rPwZpEXzoTCVq07w_vDYz.png', 'fullname': 'Avijit Ghosh', 'name': 'evijit', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 63}, {'_id': '6051e59531c5be7f3dd5ebc9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6051e59531c5be7f3dd5ebc9/iW1huuI60224DPBzn2cki.jpeg', 'fullname': 'Giada Pistilli', 'name': 'giadap', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 141}, {'_id': '60c757ea5f9a76ab3f844f12', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1626214544196-60c757ea5f9a76ab3f844f12.png', 'fullname': 'Margaret Mitchell', 'name': 'meg', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 190}, {'_id': '60edd0133e2c73a9a21455f5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60edd0133e2c73a9a21455f5/yK1G-Fv-YjYb7v_chkz3p.jpeg', 'fullname': 'Sasha Luccioni', 'name': 'sasha', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 282}]","[{'reaction': '🚀', 'users': ['John6666', 'giadap', 'yoeldcd', 'naturelizer', 'ucsahin', 'TheDrunkenSnail', 'proskurinartwork'], 'count': 7}]",2025-01-14 01:02:33,2025-01-14 01:02:33.379,[],/posts/fdaudens/242926717708998,2375,"{'language': 'en', 'probability': 0.8133872747421265}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/669c89e98f2dbc203f9e74ab/higvnXEHeo_Ig2bgTpn47.png,41.0,Vincent Granville,vincentg64,849462666975009,"[{'type': 'text', 'value': '9 Tips to Design Hallucination-Free RAG/LLM Systems', 'raw': '9 Tips to Design Hallucination-Free RAG/LLM Systems'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And in our case (see ', 'raw': 'And in our case (see '}, {'type': 'link', 'href': 'https://mltblog.com/4fPuvTb', 'raw': 'https://mltblog.com/4fPuvTb'}, {'type': 'text', 'value': '), with no training and zero parameter! By zero parameter, I mean no neural network parameters (the typical 40B you see in many LLMs, that stands for 40 billion parameters also called weights). We do indeed have a few intuitive parameters that you can fine-tune in real time.', 'raw': '), with no training and zero parameter! By zero parameter, I mean no neural network parameters (the typical 40B you see in many LLMs, that stands for 40 billion parameters also called weights). 
We do indeed have a few intuitive parameters that you can fine-tune in real time.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tips to make your system hallucination-free:', 'raw': 'Tips to make your system hallucination-free:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- We use sub-LLMs specific to each topic (part of a large corpus), thus mixing unrelated items is much less likely to happen.', 'raw': '- We use sub-LLMs specific to each topic (part of a large corpus), thus mixing unrelated items is much less likely to happen.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- In the base version, the output returned is unaltered rather than reworded. The latter can cause hallucinations.', 'raw': '- In the base version, the output returned is unaltered rather than reworded. The latter can cause hallucinations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- It shows a high-level structured summary first, with category, tags, agents attached to each item; the user can click on the items he is most interested in based on summary, reducing the risk of misfit.', 'raw': '- It shows a high-level structured summary first, with category, tags, agents attached to each item; the user can click on the items he is most interested in based on summary, reducing the risk of misfit.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- The user can specify agents, tags or categories in the UI, it's much more than a prompt box. He can also include negative keywords, joint keywords that must appear jointly in the corpus, put a higher weight on the first keyword in the prompt, or favor the most recent material in the results. "", 'raw': ""- The user can specify agents, tags or categories in the UI, it's much more than a prompt box. He can also include negative keywords, joint keywords that must appear jointly in the corpus, put a higher weight on the first keyword in the prompt, or favor the most recent material in the results. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Python libraries can cause hallucinations. For instance, project and projected have the same stem. We use these libraries but with workarounds to avoid these issues that can lead to hallucinations. ', 'raw': '- Python libraries can cause hallucinations. For instance, project and projected have the same stem. We use these libraries but with workarounds to avoid these issues that can lead to hallucinations. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- We return a relevancy score to each item in the prompt results, ranging from 0 to 10. If we cannot find highly relevant information in your augmented corpus, despite using a synonyms dictionary, the score will be low, telling you that the system knows that this particular item is not great. You can choose to no show items with a low score, though sometimes they contain unexpectedly interesting information (the reason to keep them).', 'raw': '- We return a relevancy score to each item in the prompt results, ranging from 0 to 10. 
If we cannot find highly relevant information in your augmented corpus, despite using a synonyms dictionary, the score will be low, telling you that the system knows that this particular item is not great. You can choose not to show items with a low score, though sometimes they contain unexpectedly interesting information (the reason to keep them).', 'raw': '- We return a relevancy score to each item in the prompt results, ranging from 0 to 10. If we cannot find highly relevant information in your augmented corpus, despite using a synonyms dictionary, the score will be low, telling you that the system knows that this particular item is not great. You can choose not to show items with a low score, though sometimes they contain unexpectedly interesting information (the reason to keep them).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- We show links and references, all coming from reliable sources. The user can double-check in case of doubt.', 'raw': '- We show links and references, all coming from reliable sources. The user can double-check in case of doubt.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- We suggest alternate keywords to use in your next prompts (related concept)', 'raw': '- We suggest alternate keywords to use in your next prompts (related concept)'}]","9 Tips to Design Hallucination-Free RAG/LLM Systems + +And in our case (see https://mltblog.com/4fPuvTb), with no training and zero parameter! By zero parameter, I mean no neural network parameters (the typical 40B you see in many LLMs, that stands for 40 billion parameters also called weights). We do indeed have a few intuitive parameters that you can fine-tune in real time. + +Tips to make your system hallucination-free: + +- We use sub-LLMs specific to each topic (part of a large corpus), thus mixing unrelated items is much less likely to happen. + +- In the base version, the output returned is unaltered rather than reworded. The latter can cause hallucinations. + +- It shows a high-level structured summary first, with category, tags, agents attached to each item; the user can click on the items he is most interested in based on summary, reducing the risk of misfit. + +- The user can specify agents, tags or categories in the UI, it's much more than a prompt box. He can also include negative keywords, joint keywords that must appear jointly in the corpus, put a higher weight on the first keyword in the prompt, or favor the most recent material in the results. + +- Python libraries can cause hallucinations. For instance, project and projected have the same stem. We use these libraries but with workarounds to avoid these issues that can lead to hallucinations. + +- We return a relevancy score to each item in the prompt results, ranging from 0 to 10. If we cannot find highly relevant information in your augmented corpus, despite using a synonyms dictionary, the score will be low, telling you that the system knows that this particular item is not great. You can choose not to show items with a low score, though sometimes they contain unexpectedly interesting information (the reason to keep them). + +- We show links and references, all coming from reliable sources. The user can double-check in case of doubt.
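The project/projected collision called out in the tips above is easy to reproduce; a tiny demonstration, assuming NLTK's PorterStemmer as the stemmer (the post doesn't name a specific library):

```python
# Distinct words collapsing to one stem -- a retriever matching on stems
# would conflate these and can surface unrelated items.
from nltk.stem import PorterStemmer

stemmer = PorterStemmer()
for word in ["project", "projected", "projection"]:
    print(word, "->", stemmer.stem(word))
# project -> project, projected -> project, projection -> project
```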
+ +- We suggest alternate keywords to use in your next prompts (related concept)","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/669c89e98f2dbc203f9e74ab/YGA1oKnKEldMEJdpBKc7p.png'}]",[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2025-01-14 00:05:21,2025-01-15 07:53:58.596,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '669c89e98f2dbc203f9e74ab', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/669c89e98f2dbc203f9e74ab/higvnXEHeo_Ig2bgTpn47.png', 'fullname': 'Vincent Granville', 'name': 'vincentg64', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 41, 'isFollowing': False}]",/posts/vincentg64/849462666975009,505,"{'language': 'en', 'probability': 0.9140932559967041}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,124705925456272,"[{'type': 'resource', 'resource': {'type': 'org', 'id': 'none-yet'}, 'url': 'https://huggingface.co/none-yet', 'raw': 'https://huggingface.co/none-yet', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/DMUQ5tetO2NhIRqDsV0V4.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'contribute to my org', 'raw': 'contribute to my org'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""or don't"", 'raw': ""or don't""}]","https://huggingface.co/none-yet +contribute to my org + +or don't",[],[],"[{'reaction': '🔥', 'users': ['pepper13', 'victor', 'nroggendorff', 'reonyy', 'EzioT'], 'count': 5}, {'reaction': '🚀', 'users': ['pepper13', 'nroggendorff', 'EzioT'], 'count': 3}, {'reaction': '👀', 'users': ['pepper13', 'victor'], 'count': 2}, {'reaction': '😎', 'users': ['pepper13', 'John6666'], 'count': 2}, {'reaction': '❤️', 'users': ['pepper13'], 'count': 1}, {'reaction': '🤗', 'users': ['pepper13'], 'count': 1}, {'reaction': '➕', 'users': ['pepper13'], 'count': 1}, {'reaction': '🧠', 'users': ['pepper13'], 'count': 1}, {'reaction': '🤝', 'users': ['pepper13'], 'count': 1}]",2025-01-13 20:27:34,2025-01-15 11:22:31.698,"[{'_id': '66fe8fb27d722f0879b4631f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg', 'fullname': 'Sk md saad amin', 'name': 'Reality123b', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}, {'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}]",/posts/nroggendorff/124705925456272,2441,"{'language': 'en', 'probability': 0.8863173723220825}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1626214544196-60c757ea5f9a76ab3f844f12.png,190.0,Margaret Mitchell,meg,691102370872165,"[{'type': 'text', 'value': '💫...And we\'re live!💫 Seasonal newsletter from ethicsy folks at Hugging Face, 
exploring the ethics of ""AI Agents""', 'raw': '💫...And we\'re live!💫 Seasonal newsletter from ethicsy folks at Hugging Face, exploring the ethics of ""AI Agents""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/ethics-soc-7', 'raw': 'https://huggingface.co/blog/ethics-soc-7'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our analyses found:', 'raw': 'Our analyses found:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- There\'s a spectrum of ""agent""-ness', 'raw': '- There\'s a spectrum of ""agent""-ness'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- *Safety* is a key issue, leading to many other value-based concerns', 'raw': '- *Safety* is a key issue, leading to many other value-based concerns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read for details & what to do next!', 'raw': 'Read for details & what to do next!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With ', 'raw': 'With '}, {'type': 'mention', 'user': 'evijit', 'raw': '@evijit'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'mention', 'user': 'giadap', 'raw': '@giadap'}, {'type': 'text', 'value': ' , and ', 'raw': ' , and '}, {'type': 'mention', 'user': 'sasha', 'raw': '@sasha'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","💫...And we're live!💫 Seasonal newsletter from ethicsy folks at Hugging Face, exploring the ethics of ""AI Agents"" +https://huggingface.co/blog/ethics-soc-7 +Our analyses found: +- There's a spectrum of ""agent""-ness +- *Safety* is a key issue, leading to many other value-based concerns +Read for details & what to do next! +With @evijit , @giadap , and @sasha ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60c757ea5f9a76ab3f844f12/sc6ZYZhC_Ok9l3AlG5Cgk.png'}]","[{'_id': '6413251362e6057cbb6259bd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6413251362e6057cbb6259bd/rPwZpEXzoTCVq07w_vDYz.png', 'fullname': 'Avijit Ghosh', 'name': 'evijit', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 63}, {'_id': '6051e59531c5be7f3dd5ebc9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6051e59531c5be7f3dd5ebc9/iW1huuI60224DPBzn2cki.jpeg', 'fullname': 'Giada Pistilli', 'name': 'giadap', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 141}, {'_id': '60edd0133e2c73a9a21455f5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60edd0133e2c73a9a21455f5/yK1G-Fv-YjYb7v_chkz3p.jpeg', 'fullname': 'Sasha Luccioni', 'name': 'sasha', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 282}]","[{'reaction': '🔥', 'users': ['yjernite', 'cyrilzakka', 'evijit', 'John6666', 'giadap', 'not-lain', 'noellebrush', 'davidberenstein1957'], 'count': 8}]",2025-01-13 19:42:42,2025-01-13 19:43:03.240,[],/posts/meg/691102370872165,3388,"{'language': 'en', 'probability': 0.8622910976409912}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1594144055859-5ee3a7cd2a3eae3cbdad1305.jpeg,301.0,Yacine Jernite,yjernite,113623605287739,"[{'type': 'text', 'value': '🤗👤 💻 Speaking of AI agents ...', 'raw': '🤗👤 💻 Speaking of AI agents ...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '...Is easier with the right words ;)', 'raw': '...Is easier with the right words ;)'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My colleagues ', 'raw': 'My colleagues '}, {'type': 'mention', 'user': 'meg', 'raw': '@meg'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'evijit', 'raw': '@evijit'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'sasha', 'raw': '@sasha'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'giadap', 'raw': '@giadap'}, {'type': 'text', 'value': ' just published a wonderful blog post outlining some of the main relevant notions with their signature blend of value-informed and risk-benefits contrasting approach. Go have a read!', 'raw': ' just published a wonderful blog post outlining some of the main relevant notions with their signature blend of value-informed and risk-benefits contrasting approach. Go have a read!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/ethics-soc-7', 'raw': 'https://huggingface.co/blog/ethics-soc-7'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","🤗👤 💻 Speaking of AI agents ... +...Is easier with the right words ;) + +My colleagues @meg @evijit @sasha and @giadap just published a wonderful blog post outlining some of the main relevant notions with their signature blend of value-informed and risk-benefits contrasting approach. Go have a read! + +https://huggingface.co/blog/ethics-soc-7 ",[],"[{'_id': '6413251362e6057cbb6259bd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6413251362e6057cbb6259bd/rPwZpEXzoTCVq07w_vDYz.png', 'fullname': 'Avijit Ghosh', 'name': 'evijit', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 63}, {'_id': '6051e59531c5be7f3dd5ebc9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6051e59531c5be7f3dd5ebc9/iW1huuI60224DPBzn2cki.jpeg', 'fullname': 'Giada Pistilli', 'name': 'giadap', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 141}, {'_id': '60c757ea5f9a76ab3f844f12', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1626214544196-60c757ea5f9a76ab3f844f12.png', 'fullname': 'Margaret Mitchell', 'name': 'meg', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 190}, {'_id': '60edd0133e2c73a9a21455f5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60edd0133e2c73a9a21455f5/yK1G-Fv-YjYb7v_chkz3p.jpeg', 'fullname': 'Sasha Luccioni', 'name': 'sasha', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 282}]","[{'reaction': '🔥', 'users': ['cyrilzakka', 'evijit', 'John6666', 'giadap'], 'count': 4}]",2025-01-13 19:36:26,2025-01-13 19:42:19.579,[],/posts/yjernite/113623605287739,2427,"{'language': 'en', 'probability': 0.8446654081344604}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/w3Z6xyKVBA6np65Tb16dP.jpeg,68.0,Simon Pagezy,pagezyhf,759493474497681,"[{'type': 'text', 'value': 'Learn how to deploy multiple LoRA adapters on Vertex AI with this blogpost, using Hugging Face Deep Learning Containers on GCP.', 'raw': 'Learn how to deploy multiple LoRA adapters on Vertex AI with this blogpost, using Hugging Face Deep Learning Containers on GCP.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 
'https://medium.com/google-cloud/open-models-on-vertex-ai-with-hugging-face-serving-multiple-lora-adapters-on-vertex-ai-e3ceae7b717c', 'raw': 'https://medium.com/google-cloud/open-models-on-vertex-ai-with-hugging-face-serving-multiple-lora-adapters-on-vertex-ai-e3ceae7b717c'}]","Learn how to deploy multiple LoRA adapters on Vertex AI with this blogpost, using Hugging Face Deep Learning Containers on GCP. + +https://medium.com/google-cloud/open-models-on-vertex-ai-with-hugging-face-serving-multiple-lora-adapters-on-vertex-ai-e3ceae7b717c",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-01-13 18:00:57,2025-01-13 18:00:57.854,[],/posts/pagezyhf/759493474497681,464,"{'language': 'en', 'probability': 0.6179152727127075}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,838321188021267,"[{'type': 'text', 'value': 'Introducing scandi-fine-web-cleaner ', 'raw': 'Introducing scandi-fine-web-cleaner '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'davanstrien/scandi-fine-web-cleaner'}, 'url': 'https://huggingface.co/davanstrien/scandi-fine-web-cleaner', 'raw': 'https://huggingface.co/davanstrien/scandi-fine-web-cleaner'}, {'type': 'text', 'value': ', the first model trained on FineWeb-C community annotations!', 'raw': ', the first model trained on FineWeb-C community annotations!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'FineWeb2 is a massive multilingual dataset for pre-training language models. Like any web-scale dataset, it contains low-quality content. How can we improve it?', 'raw': 'FineWeb2 is a massive multilingual dataset for pre-training language models. Like any web-scale dataset, it contains low-quality content. 
How can we improve it?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Over the past months, an amazing community of 400+ annotators has been labelling content quality (using Argilla) across 23 languages through the FineWeb-C initiative.', 'raw': 'Over the past months, an amazing community of 400+ annotators has been labelling content quality (using Argilla) across 23 languages through the FineWeb-C initiative.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Today, I'm happy to share the first classifier trained on this data."", 'raw': ""Today, I'm happy to share the first classifier trained on this data.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🔍 What we've built: "", 'raw': ""🔍 What we've built: ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- A lightweight classifier that efficiently removes low-quality content', 'raw': '- A lightweight classifier that efficiently removes low-quality content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 90%+ precision demonstrated on Danish & Swedish', 'raw': '- 90%+ precision demonstrated on Danish & Swedish'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Can process the 43M+ documents in Danish FineWeb2 with minimal compute', 'raw': '- Can process the 43M+ documents in Danish FineWeb2 with minimal compute'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌍 Why this matters: The approach can be reproduced for any of the 23 languages in FineWeb-C (', 'raw': '🌍 Why this matters: The approach can be reproduced for any of the 23 languages in FineWeb-C ('}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'data-is-better-together/fineweb-c'}, 'url': 'https://huggingface.co/datasets/data-is-better-together/fineweb-c', 'raw': 'https://huggingface.co/datasets/data-is-better-together/fineweb-c'}, {'type': 'text', 'value': '). We can improve training data quality at scale without massive compute resources by starting with community annotations and training small, efficient classifiers.', 'raw': '). We can improve training data quality at scale without massive compute resources by starting with community annotations and training small, efficient classifiers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Want to build a classifier for your language? Check out the full blog post with code examples and implementation details: ', 'raw': 'Want to build a classifier for your language? Check out the full blog post with code examples and implementation details: '}, {'type': 'link', 'href': 'https://danielvanstrien.xyz/posts/2025/FineWeb-c/scandinavian-content-filtering-fineweb.html', 'raw': 'https://danielvanstrien.xyz/posts/2025/FineWeb-c/scandinavian-content-filtering-fineweb.html'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","Introducing scandi-fine-web-cleaner https://huggingface.co/davanstrien/scandi-fine-web-cleaner, the first model trained on FineWeb-C community annotations! + +FineWeb2 is a massive multilingual dataset for pre-training language models. Like any web-scale dataset, it contains low-quality content. How can we improve it? 
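For anyone who wants to try the cleaner on their own documents, a minimal sketch with the transformers pipeline; the label name and cutoff below are assumptions, so check the model card for the real ones.

```python
from transformers import pipeline

# Load the quality classifier trained on FineWeb-C community annotations.
clf = pipeline("text-classification", model="davanstrien/scandi-fine-web-cleaner")

docs = [
    "En velskrevet artikel om dansk historie og kultur.",
    "click here click here buy now $$$ free free free",
]

for doc, pred in zip(docs, clf(docs, truncation=True)):
    # NOTE: the label string and 0.5 cutoff are assumptions; see the model card.
    keep = not (pred["label"] == "problematic" and pred["score"] > 0.5)
    print(keep, pred["label"], round(pred["score"], 3))
```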
+ +Over the past months, an amazing community of 400+ annotators has been labelling content quality (using Argilla) across 23 languages through the FineWeb-C initiative. + +Today, I'm happy to share the first classifier trained on this data. + +🔍 What we've built: + +- A lightweight classifier that efficiently removes low-quality content +- 90%+ precision demonstrated on Danish & Swedish +- Can process the 43M+ documents in Danish FineWeb2 with minimal compute + +🌍 Why this matters: The approach can be reproduced for any of the 23 languages in FineWeb-C (https://huggingface.co/datasets/data-is-better-together/fineweb-c). We can improve training data quality at scale without massive compute resources by starting with community annotations and training small, efficient classifiers. + +Want to build a classifier for your language? Check out the full blog post with code examples and implementation details: https://danielvanstrien.xyz/posts/2025/FineWeb-c/scandinavian-content-filtering-fineweb.html +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60107b385ac3e86b3ea4fc34/luXeg-3f6nCqvxF5cVINX.png'}]",[],"[{'reaction': '🔥', 'users': ['saattrupdan', 'menbom', 'KnutJaegersberg', 'KennethEnevoldsen', 'Josephgflowers', 'John6666'], 'count': 6}, {'reaction': '🤗', 'users': ['davanstrien', 'rasgaard', 'saattrupdan', 'KennethEnevoldsen'], 'count': 4}]",2025-01-13 17:14:34,2025-01-13 17:24:05.346,"[{'_id': '60107b385ac3e86b3ea4fc34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg', 'fullname': 'Daniel van Strien', 'name': 'davanstrien', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 638, 'isFollowing': False}]",/posts/davanstrien/838321188021267,3093,"{'language': 'en', 'probability': 0.8167424201965332}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1669551186189-63732ebbbd81fae2b3aaf3fb.jpeg,296.0,Knut Jägersberg,KnutJaegersberg,366813633044388,"[{'type': 'text', 'value': 'prithivMLmods/Phi-4-o1', 'raw': 'prithivMLmods/Phi-4-o1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Phi-4-o1'}, 'url': 'https://huggingface.co/prithivMLmods/Phi-4-o1', 'raw': 'https://huggingface.co/prithivMLmods/Phi-4-o1'}]","prithivMLmods/Phi-4-o1 + +https://huggingface.co/prithivMLmods/Phi-4-o1","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63732ebbbd81fae2b3aaf3fb/0GuttqJ5yVFCggHWSUMX1.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666'], 'count': 1}]",2025-01-13 12:19:05,2025-01-13 12:19:05.586,[],/posts/KnutJaegersberg/366813633044388,636,"{'language': 'en', 'probability': 0.1823221892118454}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,857129879440533,"[{'type': 'text', 'value': ""there's a new multimodal retrieval model in town 🤠"", 'raw': ""there's a new multimodal retrieval model in town 🤠""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LlamaIndex released vdr-2b-multi-v1', 'raw': 'LlamaIndex released vdr-2b-multi-v1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> uses 70% less image tokens, yet outperforming other dse-qwen2 based models', 'raw': '> uses 70% less image tokens, yet outperforming other dse-qwen2 based models'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> 3x faster inference with less VRAM 💨', 'raw': '> 3x faster inference with less VRAM 💨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> shrinkable with matryoshka 🪆', 'raw': '> shrinkable with matryoshka 🪆'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> can do cross-lingual retrieval!', 'raw': '> can do cross-lingual retrieval!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection: ', 'raw': 'Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'llamaindex/visual-document-retrieval-678151d19d2758f78ce910e1'}, 'url': 'https://huggingface.co/collections/llamaindex/visual-document-retrieval-678151d19d2758f78ce910e1', 'raw': 'https://huggingface.co/collections/llamaindex/visual-document-retrieval-678151d19d2758f78ce910e1'}, {'type': 'text', 'value': ' (with models and datasets)', 'raw': ' (with models and datasets)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'llamaindex/multimodal_vdr_demo'}, 'url': 'https://huggingface.co/spaces/llamaindex/multimodal_vdr_demo', 'raw': 'https://huggingface.co/spaces/llamaindex/multimodal_vdr_demo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Learn more from their blog post here ', 'raw': 'Learn more from their blog post here '}, {'type': 'link', 'href': 'https://huggingface.co/blog/vdr-2b-multilingual', 'raw': 'https://huggingface.co/blog/vdr-2b-multilingual'}, {'type': 'text', 'value': ' 📖 ', 'raw': ' 📖 '}, {'type': 'new_line', 'raw': '\n'}]","there's a new multimodal retrieval model in town 🤠 +LlamaIndex released vdr-2b-multi-v1 +> uses 70% less image tokens, yet outperforming other dse-qwen2 based models +> 3x faster inference with less VRAM 💨 +> shrinkable with matryoshka 🪆 +> can do cross-lingual retrieval! 
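On the matryoshka point: such embeddings can be shrunk after the fact by truncating and re-normalizing. A generic sketch of that technique (the general idea, not this model's own API; the dimensions here are made up):

```python
import numpy as np

def shrink(embeddings: np.ndarray, dim: int) -> np.ndarray:
    """Truncate matryoshka embeddings to `dim` and L2-renormalize so that
    cosine / dot-product similarity still behaves at the smaller size."""
    cut = embeddings[:, :dim]
    return cut / np.linalg.norm(cut, axis=1, keepdims=True)

# Toy stand-ins for document and query embeddings (dimension is assumed).
docs = np.random.randn(4, 1536).astype(np.float32)
query = np.random.randn(1, 1536).astype(np.float32)

scores = shrink(docs, 256) @ shrink(query, 256).T  # 6x smaller vectors
print(scores.ravel())  # similarity of each doc to the query
```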
+Collection: https://huggingface.co/collections/llamaindex/visual-document-retrieval-678151d19d2758f78ce910e1 (with models and datasets) +Demo: https://huggingface.co/spaces/llamaindex/multimodal_vdr_demo +Learn more from their blog post here https://huggingface.co/blog/vdr-2b-multilingual 📖 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/st9jBUfW7tk5LJRJukrjJ.png'}]",[],"[{'reaction': '❤️', 'users': ['Tonic', 'John6666', 'JackCloudman', 'nickprock', 'KnutJaegersberg', 'not-lain', 'egemensert', 'Joseph717171', 'mikpam'], 'count': 9}, {'reaction': '🔥', 'users': ['not-lain'], 'count': 1}]",2025-01-13 11:19:32,2025-01-13 11:19:32.307,[],/posts/merve/857129879440533,3909,"{'language': 'en', 'probability': 0.7794153690338135}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,172300082815827,"[{'type': 'text', 'value': 'Excited to share a groundbreaking development in recommendation systems - Legommenders, a comprehensive content-based recommendation library that revolutionizes how we approach personalized content delivery.', 'raw': 'Excited to share a groundbreaking development in recommendation systems - Legommenders, a comprehensive content-based recommendation library that revolutionizes how we approach personalized content delivery.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Key Innovations', 'raw': '>> Key Innovations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'End-to-End Training', 'raw': 'End-to-End Training'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The library enables joint training of content encoders alongside behavior and interaction modules, making it the first of its kind to offer truly integrated content understanding in recommendation pipelines.', 'raw': 'The library enables joint training of content encoders alongside behavior and interaction modules, making it the first of its kind to offer truly integrated content understanding in recommendation pipelines.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Massive Scale', 'raw': 'Massive Scale'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Supports creation and analysis of over 1,000 distinct models', 'raw': '- Supports creation and analysis of over 1,000 distinct models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Compatible with 15 diverse datasets', 'raw': '- Compatible with 15 diverse datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Features 15 content operators, 8 behavior operators, and 9 click predictors', 'raw': '- Features 15 content operators, 8 behavior operators, and 9 click predictors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Advanced LLM Integration', 'raw': 'Advanced LLM Integration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Legommenders pioneers LLM integration in two crucial ways:', 'raw': 'Legommenders pioneers LLM integration in two crucial ways:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- As feature encoders for enhanced content understanding', 'raw': '- As feature encoders for enhanced content understanding'}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'text', 'value': '- As data generators for high-quality training data augmentation', 'raw': '- As data generators for high-quality training data augmentation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Superior Architecture', 'raw': 'Superior Architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The system comprises four core components:', 'raw': 'The system comprises four core components:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dataset processor for unified data handling', 'raw': '- Dataset processor for unified data handling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Content operator for embedding generation', 'raw': '- Content operator for embedding generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Behavior operator for user sequence fusion', 'raw': '- Behavior operator for user sequence fusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Click predictor for probability calculations', 'raw': '- Click predictor for probability calculations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Performance Optimization', 'raw': 'Performance Optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The library introduces an innovative caching pipeline that achieves up to 50x speedup in evaluation compared to traditional approaches.', 'raw': 'The library introduces an innovative caching pipeline that achieves up to 50x speedup in evaluation compared to traditional approaches.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Developed by researchers from The Hong Kong Polytechnic University, this open-source project represents a significant leap forward in recommendation system technology.', 'raw': 'Developed by researchers from The Hong Kong Polytechnic University, this open-source project represents a significant leap forward in recommendation system technology.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For those interested in content-based recommendation systems, this is a must-explore tool. The library is available on GitHub for implementation and experimentation.', 'raw': 'For those interested in content-based recommendation systems, this is a must-explore tool. The library is available on GitHub for implementation and experimentation.'}]","Excited to share a groundbreaking development in recommendation systems - Legommenders, a comprehensive content-based recommendation library that revolutionizes how we approach personalized content delivery. + +>> Key Innovations + +End-to-End Training +The library enables joint training of content encoders alongside behavior and interaction modules, making it the first of its kind to offer truly integrated content understanding in recommendation pipelines. 
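As a rough mental model of the pipeline described above, a hedged sketch; the class and function names are invented for illustration and are not Legommenders' actual API.

```python
from dataclasses import dataclass
from typing import Callable, List

@dataclass
class RecPipeline:
    # Hypothetical stand-ins for the components named above.
    content_op: Callable[[str], List[float]]                  # text -> embedding
    behavior_op: Callable[[List[List[float]]], List[float]]   # fuse user history
    click_predictor: Callable[[List[float], List[float]], float]  # P(click)

    def score(self, user_history: List[str], candidate: str) -> float:
        history_vecs = [self.content_op(t) for t in user_history]
        user_vec = self.behavior_op(history_vecs)
        item_vec = self.content_op(candidate)
        return self.click_predictor(user_vec, item_vec)

# Toy plumbing so the sketch runs end to end (a dataset processor would
# normally sit in front of this, unifying the raw data).
embed = lambda text: [float(len(text) % 7), float(len(text.split()))]
mean_pool = lambda vecs: [sum(x) / len(vecs) for x in zip(*vecs)]
dot = lambda u, v: sum(a * b for a, b in zip(u, v))

pipe = RecPipeline(content_op=embed, behavior_op=mean_pool, click_predictor=dot)
print(pipe.score(["news about GPUs", "new LLM releases"], "a new MoE model drops"))
```

Joint end-to-end training means the content operator's weights are updated by the click loss rather than frozen, which is the library's headline feature.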
+ +Massive Scale +- Supports creation and analysis of over 1,000 distinct models +- Compatible with 15 diverse datasets +- Features 15 content operators, 8 behavior operators, and 9 click predictors + +Advanced LLM Integration +Legommenders pioneers LLM integration in two crucial ways: +- As feature encoders for enhanced content understanding +- As data generators for high-quality training data augmentation + +Superior Architecture +The system comprises four core components: +- Dataset processor for unified data handling +- Content operator for embedding generation +- Behavior operator for user sequence fusion +- Click predictor for probability calculations + +Performance Optimization +The library introduces an innovative caching pipeline that achieves up to 50x speedup in evaluation compared to traditional approaches. + +Developed by researchers from The Hong Kong Polytechnic University, this open-source project represents a significant leap forward in recommendation system technology. + +For those interested in content-based recommendation systems, this is a must-explore tool. The library is available on GitHub for implementation and experimentation.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/huN5qk-r6bofF8HRZ3AGK.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2025-01-13 07:24:31,2025-01-13 07:24:31.185,[],/posts/singhsidhukuldeep/172300082815827,645,"{'language': 'en', 'probability': 0.8406324982643127}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,705890343346173,"[{'type': 'text', 'value': 'Famous IC-Light - Relight Images - Advanced Gradio APP with Windows, RunPod, Massed Compute and Free Kaggle Account Installers Published', 'raw': 'Famous IC-Light - Relight Images - Advanced Gradio APP with Windows, RunPod, Massed Compute and Free Kaggle Account Installers Published'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Installers are shared here : ', 'raw': 'Installers are shared here : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/famous-ic-light-119566071', 'raw': 'https://www.patreon.com/posts/famous-ic-light-119566071'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1-Click to install and use on Windows, RunPod, Massed Compute and a free Kaggle account notebook', 'raw': '1-Click to install and use on Windows, RunPod, Massed Compute and a free Kaggle account notebook'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All working perfectly with a more advanced Gradio app than what was officially published on the official repo : ', 'raw': 'All working perfectly with a more advanced Gradio app than what was officially published on the official repo : '}, {'type': 'link', 'href': 'https://github.com/lllyasviel/IC-Light', 'raw': 'https://github.com/lllyasviel/IC-Light'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Moreover,', 'raw': 'Moreover,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Started another experimental product training for a client. Doing FLUX Dreambooth / Finetuning via Kohya SS GUI. GPU is L40S and batch size is 7. 
Config name : Batch_Size_7_48GB_GPU_46250MB_29.1_second_it_Tier_1.json', 'raw': 'Started another experimental product training for a client. Doing FLUX Dreambooth / Finetuning via Kohya SS GUI. GPU is L40S and batch size is 7. Config name : Batch_Size_7_48GB_GPU_46250MB_29.1_second_it_Tier_1.json'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full workflow, step by step tutorial and configs : ', 'raw': 'Full workflow, step by step tutorial and configs : '}, {'type': 'link', 'href': 'https://youtu.be/FvpWy1x5etM', 'raw': 'https://youtu.be/FvpWy1x5etM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the attached images in full resolution for more info', 'raw': 'Check out the attached images in full resolution for more info'}]","Famous IC-Light - Relight Images - Advanced Gradio APP with Windows, RunPod, Massed Compute and Free Kaggle Account Installers Published + + +Installers are shared here : https://www.patreon.com/posts/famous-ic-light-119566071 + +1-Click to install and use on Windows, RunPod, Massed Compute and a free Kaggle account notebook + +All working perfectly with a more advanced Gradio app than what was officially published on the official repo : https://github.com/lllyasviel/IC-Light + +Moreover, + +Started another experimental product training for a client. Doing FLUX Dreambooth / Finetuning via Kohya SS GUI. GPU is L40S and batch size is 7. Config name : Batch_Size_7_48GB_GPU_46250MB_29.1_second_it_Tier_1.json + +Full workflow, step by step tutorial and configs : https://youtu.be/FvpWy1x5etM + +Check out the attached images in full resolution for more info","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/WNSfWHwgzpq9Jx20KDuw_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/AoeCKswKdQvldiqPc9_kU.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/a4_nFkobx3AOU2gtLI7GY.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/jcrUEj8wxJnJuat9poHVg.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/VUjadoIKnv7nSF6C8PRrK.png'}]",[],"[{'reaction': '🚀', 'users': ['MonsterMMORPG', 'roland0822', 'BrigitteTousi', 'Nielly'], 'count': 4}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'abhisaha', 'Awario'], 'count': 3}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666'], 'count': 2}, {'reaction': '🔥', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '❤️', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}]",2025-01-09 00:00:55,2025-01-09 00:00:55.163,[],/posts/MonsterMMORPG/705890343346173,1835,"{'language': 'en', 'probability': 0.8199204802513123}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674191139776-noauth.png,257.0,Xuan-Son Nguyen,ngxson,446370094294126,"[{'type': 'text', 'value': 'I made this small tool that can be useful for debugging Ollama chat template: ', 'raw': 'I made this small tool that can be useful for 
debugging Ollama chat template: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ngxson/ollama_template_test'}, 'url': 'https://huggingface.co/spaces/ngxson/ollama_template_test', 'raw': 'https://huggingface.co/spaces/ngxson/ollama_template_test'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'CC ', 'raw': 'CC '}, {'type': 'mention', 'user': 'bartowski', 'raw': '@bartowski'}, {'type': 'text', 'value': ' you may need this ;-)', 'raw': ' you may need this ;-)'}]","I made this small tool that can be useful for debugging Ollama chat template: https://huggingface.co/spaces/ngxson/ollama_template_test + +CC @bartowski you may need this ;-)",[],"[{'_id': '6435718aaaef013d1aec3b8b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6435718aaaef013d1aec3b8b/XKf-8MA47tjVAM6SCX0MP.jpeg', 'fullname': 'Bartowski', 'name': 'bartowski', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7068}]","[{'reaction': '👀', 'users': ['victor', 'John6666', 'reonyy', 'Nymbo', 'Daemontatox'], 'count': 5}, {'reaction': '🚀', 'users': ['bartowski', 'so-anyway', 'Felladrin', 'gn00029914'], 'count': 4}]",2025-01-08 20:58:56,2025-01-09 15:20:47.002,"[{'_id': '66404d2ba1e3db6f3774280b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66404d2ba1e3db6f3774280b/cu2w9nT7vCalkEOhHIYDa.jpeg', 'fullname': 'So Anyway', 'name': 'so-anyway', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '63ca214abedad7e2bf1d1517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1674191139776-noauth.png', 'fullname': 'Xuan-Son Nguyen', 'name': 'ngxson', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 257, 'isFollowing': False}]",/posts/ngxson/446370094294126,3557,"{'language': 'en', 'probability': 0.7068580389022827}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,525048236877774,"[{'type': 'text', 'value': 'Groundbreaking Research Alert: Correctness ≠ Faithfulness in RAG Systems', 'raw': 'Groundbreaking Research Alert: Correctness ≠ Faithfulness in RAG Systems'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fascinating new research from L3S Research Center, University of Amsterdam, and TU Delft reveals a critical insight into Retrieval Augmented Generation (RAG) systems. The study exposes that up to 57% of citations in RAG systems could be unfaithful, despite being technically correct.', 'raw': 'Fascinating new research from L3S Research Center, University of Amsterdam, and TU Delft reveals a critical insight into Retrieval Augmented Generation (RAG) systems. 
The study exposes that up to 57% of citations in RAG systems could be unfaithful, despite being technically correct.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Key Technical Insights:', 'raw': '>> Key Technical Insights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Post-rationalization Problem', 'raw': 'Post-rationalization Problem'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The researchers discovered that RAG systems often engage in ""post-rationalization"" - where models first generate answers from their parametric memory and then search for supporting evidence afterward. This means that while citations may be correct, they don\'t reflect the actual reasoning process.', 'raw': 'The researchers discovered that RAG systems often engage in ""post-rationalization"" - where models first generate answers from their parametric memory and then search for supporting evidence afterward. This means that while citations may be correct, they don\'t reflect the actual reasoning process.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Experimental Design', 'raw': 'Experimental Design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The team used Command-R+ (104B parameters) with 4-bit quantization on NVIDIA A100 GPU, testing on the NaturalQuestions dataset. They employed BM25 for initial retrieval and ColBERT v2 for reranking.', 'raw': 'The team used Command-R+ (104B parameters) with 4-bit quantization on NVIDIA A100 GPU, testing on the NaturalQuestions dataset. They employed BM25 for initial retrieval and ColBERT v2 for reranking.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Attribution Framework', 'raw': 'Attribution Framework'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The research introduces a comprehensive framework for evaluating RAG systems across multiple dimensions:', 'raw': 'The research introduces a comprehensive framework for evaluating RAG systems across multiple dimensions:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Citation Correctness: Whether cited documents support the claims', 'raw': '- Citation Correctness: Whether cited documents support the claims'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Citation Faithfulness: Whether citations reflect actual model reasoning', 'raw': '- Citation Faithfulness: Whether citations reflect actual model reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Citation Appropriateness: Relevance and meaningfulness of citations', 'raw': '- Citation Appropriateness: Relevance and meaningfulness of citations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Citation Comprehensiveness: Coverage of key points', 'raw': '- Citation Comprehensiveness: Coverage of key points'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Under the Hood', 'raw': 'Under the Hood'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The system processes involve:', 'raw': 'The system processes involve:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Document relevance prediction', 'raw': '1. Document relevance prediction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Citation prediction', 'raw': '2. 
Citation prediction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Answer generation without citations', 'raw': '3. Answer generation without citations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Answer generation with citations', 'raw': '4. Answer generation with citations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This work fundamentally challenges our understanding of RAG systems and highlights the need for more robust evaluation metrics in AI systems that claim to provide verifiable information.', 'raw': 'This work fundamentally challenges our understanding of RAG systems and highlights the need for more robust evaluation metrics in AI systems that claim to provide verifiable information.'}, {'type': 'new_line', 'raw': '\n'}]","Groundbreaking Research Alert: Correctness ≠ Faithfulness in RAG Systems + +Fascinating new research from L3S Research Center, University of Amsterdam, and TU Delft reveals a critical insight into Retrieval Augmented Generation (RAG) systems. The study exposes that up to 57% of citations in RAG systems could be unfaithful, despite being technically correct. + +>> Key Technical Insights: + +Post-rationalization Problem +The researchers discovered that RAG systems often engage in ""post-rationalization"" - where models first generate answers from their parametric memory and then search for supporting evidence afterward. This means that while citations may be correct, they don't reflect the actual reasoning process. + +Experimental Design +The team used Command-R+ (104B parameters) with 4-bit quantization on NVIDIA A100 GPU, testing on the NaturalQuestions dataset. They employed BM25 for initial retrieval and ColBERT v2 for reranking. + +Attribution Framework +The research introduces a comprehensive framework for evaluating RAG systems across multiple dimensions: +- Citation Correctness: Whether cited documents support the claims +- Citation Faithfulness: Whether citations reflect actual model reasoning +- Citation Appropriateness: Relevance and meaningfulness of citations +- Citation Comprehensiveness: Coverage of key points + +Under the Hood +The system processes involve: +1. Document relevance prediction +2. Citation prediction +3. Answer generation without citations +4. Answer generation with citations + +This work fundamentally challenges our understanding of RAG systems and highlights the need for more robust evaluation metrics in AI systems that claim to provide verifiable information. 
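One cheap way to probe for the post-rationalization behavior described above (an illustrative heuristic, not the paper's protocol): generate an answer with retrieval disabled and one with retrieval enabled, and flag cases where the "grounded" answer is essentially unchanged.

```python
from difflib import SequenceMatcher

def post_rationalization_flag(answer_no_docs: str, answer_with_docs: str,
                              threshold: float = 0.9) -> bool:
    """Heuristic: if the answer barely changes when documents are provided,
    the citations may rationalize parametric memory rather than drive it."""
    sim = SequenceMatcher(None, answer_no_docs, answer_with_docs).ratio()
    return sim >= threshold

# Toy example with two generations for the same question.
closed_book = "The Eiffel Tower was completed in 1889."
with_docs = "The Eiffel Tower was completed in 1889. [1]"
print(post_rationalization_flag(closed_book, with_docs))  # True -> inspect citations
```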
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/yvasFOZyO16bwF_Z5Pme8.jpeg'}]",[],"[{'reaction': '👀', 'users': ['csabakecskemeti', 'John6666', 'Daemontatox'], 'count': 3}]",2025-01-08 19:53:45,2025-01-08 23:30:42.610,"[{'_id': '64e6d37e02dee9bcb9d9fa18', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg', 'fullname': 'Csaba Kecskemeti', 'name': 'csabakecskemeti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 69, 'isFollowing': False}]",/posts/singhsidhukuldeep/525048236877774,1457,"{'language': 'en', 'probability': 0.8184293508529663}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64740cf7485a7c8e1bd51ac9/qxcEOKY4M36ggryaoc4L-.jpeg,226.0,Beckett Dillon,Severian,375067343900874,"[{'type': 'text', 'value': 'Interesting Solution to the Problem of Misguided Attention', 'raw': 'Interesting Solution to the Problem of Misguided Attention'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So I\'ve been fascinated by the problem of Misguided Attention for a few weeks. I am trying to build an inference algorithm to help LLMs address that issue; but in the process, I found a cool short-term fix I call ""Mindful Attention"" using just prompt-engineering.', 'raw': 'So I\'ve been fascinated by the problem of Misguided Attention for a few weeks. I am trying to build an inference algorithm to help LLMs address that issue; but in the process, I found a cool short-term fix I call ""Mindful Attention"" using just prompt-engineering.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Have you ever thought about how our brains filter reality through layers of past experiences, concepts, and mental images? For example, when you look at an oak tree, are you truly seeing that oak tree in all its unique details, or are you overlaying it with a generalized idea of ""oak tree""? This phenomenon inspired the new approach.', 'raw': 'Have you ever thought about how our brains filter reality through layers of past experiences, concepts, and mental images? For example, when you look at an oak tree, are you truly seeing that oak tree in all its unique details, or are you overlaying it with a generalized idea of ""oak tree""? This phenomenon inspired the new approach.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLMs often fall into a similar trap, hence the Misguided Attention problem. They process input not as it’s uniquely presented but through patterns and templates they’ve seen before. This leads to responses that can feel ""off,"" like missing the point of a carefully crafted prompt or defaulting to familiar but irrelevant solutions.', 'raw': 'LLMs often fall into a similar trap, hence the Misguided Attention problem. They process input not as it’s uniquely presented but through patterns and templates they’ve seen before. This leads to responses that can feel ""off,"" like missing the point of a carefully crafted prompt or defaulting to familiar but irrelevant solutions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I wanted to address this head-on by encouraging LLMs to slow down, focus, and engage directly with the input—free of assumptions. 
This is the core of the Mindful Attention Directive, a prompt designed to steer models away from over-generalization and back into the moment.', 'raw': 'I wanted to address this head-on by encouraging LLMs to slow down, focus, and engage directly with the input—free of assumptions. This is the core of the Mindful Attention Directive, a prompt designed to steer models away from over-generalization and back into the moment.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can read more about the broader issue here: ', 'raw': 'You can read more about the broader issue here: '}, {'type': 'link', 'href': 'https://github.com/cpldcpu/MisguidedAttention', 'raw': 'https://github.com/cpldcpu/MisguidedAttention'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And if you want to try this mindful approach in action, check out the LLM I’ve set up for testing: ', 'raw': 'And if you want to try this mindful approach in action, check out the LLM I’ve set up for testing: '}, {'type': 'link', 'href': 'https://hf.co/chat/assistant/677e7ebcb0f26b87340f032e', 'raw': 'https://hf.co/chat/assistant/677e7ebcb0f26b87340f032e'}, {'type': 'text', 'value': '. It works about 80% of the time to counteract these issues, and the results are pretty cool.', 'raw': '. It works about 80% of the time to counteract these issues, and the results are pretty cool.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'll add the Gist with the full prompt. I admit, it is quite verbose but it's the most effective one I have landed on yet. I am working on a smaller version that can be appended to any System Prompt to harness the Mindful Attention. Feel free to experiment to find a better version for the community!"", 'raw': ""I'll add the Gist with the full prompt. I admit, it is quite verbose but it's the most effective one I have landed on yet. I am working on a smaller version that can be appended to any System Prompt to harness the Mindful Attention. Feel free to experiment to find a better version for the community!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here is the Gist: ', 'raw': 'Here is the Gist: '}, {'type': 'link', 'href': 'https://gist.github.com/severian42/6dd96a94e546a38642278aeb4537cfb3', 'raw': 'https://gist.github.com/severian42/6dd96a94e546a38642278aeb4537cfb3'}]","Interesting Solution to the Problem of Misguided Attention + +So I've been fascinated by the problem of Misguided Attention for a few weeks. I am trying to build an inference algorithm to help LLMs address that issue; but in the process, I found a cool short-term fix I call ""Mindful Attention"" using just prompt-engineering. + +Have you ever thought about how our brains filter reality through layers of past experiences, concepts, and mental images? For example, when you look at an oak tree, are you truly seeing that oak tree in all its unique details, or are you overlaying it with a generalized idea of ""oak tree""? This phenomenon inspired the new approach. + +LLMs often fall into a similar trap, hence the Misguided Attention problem. They process input not as it’s uniquely presented but through patterns and templates they’ve seen before. This leads to responses that can feel ""off,"" like missing the point of a carefully crafted prompt or defaulting to familiar but irrelevant solutions. 
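If you want to wire a directive like this into your own stack, the usual pattern is simply to prepend it to the system prompt. A sketch against the OpenAI-compatible chat API; the model name and the short directive text are placeholders (the full prompt lives in the Gist linked below):

```python
from openai import OpenAI

# Placeholder summary of the full Mindful Attention directive from the Gist.
MINDFUL_DIRECTIVE = (
    "Before answering, restate the prompt in your own words and attend to "
    "what is actually asked, not to the familiar problem it resembles."
)

client = OpenAI()  # assumes OPENAI_API_KEY; any OpenAI-compatible server works

resp = client.chat.completions.create(
    model="gpt-4o-mini",  # placeholder model name
    messages=[
        {"role": "system", "content": MINDFUL_DIRECTIVE},
        {"role": "user", "content": "A man and a goat are on one side of a river..."},
    ],
)
print(resp.choices[0].message.content)
```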
+ +I wanted to address this head-on by encouraging LLMs to slow down, focus, and engage directly with the input—free of assumptions. This is the core of the Mindful Attention Directive, a prompt designed to steer models away from over-generalization and back into the moment. + +You can read more about the broader issue here: https://github.com/cpldcpu/MisguidedAttention + +And if you want to try this mindful approach in action, check out the LLM I’ve set up for testing: https://hf.co/chat/assistant/677e7ebcb0f26b87340f032e. It works about 80% of the time to counteract these issues, and the results are pretty cool. + +I'll add the Gist with the full prompt. I admit, it is quite verbose but it's the most effective one I have landed on yet. I am working on a smaller version that can be appended to any System Prompt to harness the Mindful Attention. Feel free to experiment to find a better version for the community! + +Here is the Gist: https://gist.github.com/severian42/6dd96a94e546a38642278aeb4537cfb3",[],[],"[{'reaction': '👍', 'users': ['John6666', 'Inessenialien', 'Kseniase', 'MotherSoraka', 'reonyy', 'daveid', 'aloo254', 'InterDimensionalCat', 'JustSayAlohomora', 'Smorty100', 'Dragunflie-420', 'nicoboss'], 'count': 12}, {'reaction': '❤️', 'users': ['MotherSoraka', 'reonyy', 'InterDimensionalCat', 'pabloguerrero', 'Suuribaatar', 'nicoboss'], 'count': 6}, {'reaction': '🔥', 'users': ['MotherSoraka', 'reonyy', 'SaitBurak', 'SelmaQ'], 'count': 4}, {'reaction': '👀', 'users': ['TimeLordRaps', 'Smorty100'], 'count': 2}]",2025-01-08 16:52:12,2025-01-08 16:52:12.865,[],/posts/Severian/375067343900874,3969,"{'language': 'en', 'probability': 0.9306668043136597}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,739168147599088,"[{'type': 'text', 'value': 'microsoft just released Phi-4 , check it out here : ', 'raw': 'microsoft just released Phi-4 , check it out here : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Tonic/Phi-4'}, 'url': 'https://huggingface.co/spaces/Tonic/Phi-4', 'raw': 'https://huggingface.co/spaces/Tonic/Phi-4'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'hope you like it :-)', 'raw': 'hope you like it :-)'}]","microsoft just released Phi-4 , check it out here : https://huggingface.co/spaces/Tonic/Phi-4 + +hope you like it :-)",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'reonyy', 'victor', 'Joseph717171', 'neoopus'], 'count': 5}, {'reaction': '🚀', 'users': ['John6666', 'Joseph717171'], 'count': 2}]",2025-01-08 16:22:18,2025-01-08 16:22:18.987,[],/posts/Tonic/739168147599088,1747,"{'language': 'en', 'probability': 0.7402024269104004}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63f731c7d36951307fcca6bf/DMd5-Pt7YHC0agbAQ1xUc.png,193.0,Mitko Vasilev,mitkox,245649344144062,"[{'type': 'text', 'value': ""Can it run DeepSeek V3 671B is the new 'can it run Doom'. "", 'raw': ""Can it run DeepSeek V3 671B is the new 'can it run Doom'. 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""How minimalistic can I go with on device AI with behemoth models - here I'm running DeepSeek V3 MoE on a single A6000 GPU."", 'raw': ""How minimalistic can I go with on device AI with behemoth models - here I'm running DeepSeek V3 MoE on a single A6000 GPU.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Not great, not terrible, for this minimalistic setup. I love the Mixture of Experts architectures. Typically I'm running my core LLM distributed over the 4 GPUs."", 'raw': ""Not great, not terrible, for this minimalistic setup. I love the Mixture of Experts architectures. Typically I'm running my core LLM distributed over the 4 GPUs.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Make sure you own your AI. AI in the cloud is not aligned with you; it's aligned with the company that owns it."", 'raw': ""Make sure you own your AI. AI in the cloud is not aligned with you; it's aligned with the company that owns it.""}]","Can it run DeepSeek V3 671B is the new 'can it run Doom'. + +How minimalistic can I go with on device AI with behemoth models - here I'm running DeepSeek V3 MoE on a single A6000 GPU. + +Not great, not terrible, for this minimalistic setup. I love the Mixture of Experts architectures. Typically I'm running my core LLM distributed over the 4 GPUs. + +Make sure you own your AI. AI in the cloud is not aligned with you; it's aligned with the company that owns it.","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63f731c7d36951307fcca6bf/nHD0Z73wz37KXev47Jrjf.mp4'}]",[],"[{'reaction': '🔥', 'users': ['etemiz', 'fractalego', 'roland0822', 'reonyy', 'playmak3r', 'victor', 'ShadowWolf1999'], 'count': 7}, {'reaction': '➕', 'users': ['prithivMLmods', 'fractalego', 'John6666', 'reonyy'], 'count': 4}, {'reaction': '🤯', 'users': ['fractalego', 'reonyy'], 'count': 2}, {'reaction': '🤗', 'users': ['csabakecskemeti'], 'count': 1}]",2025-01-08 12:58:09,2025-01-09 02:33:51.619,"[{'_id': '63de560a15266dd945f209ca', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63de560a15266dd945f209ca/PeZf3IF-x7Qh8OcnKH12R.png', 'fullname': 'MrDragonFox', 'name': 'MrDragonFox', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 95, 'isFollowing': False}, {'_id': '63f731c7d36951307fcca6bf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63f731c7d36951307fcca6bf/DMd5-Pt7YHC0agbAQ1xUc.png', 'fullname': 'Mitko Vasilev', 'name': 'mitkox', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 193, 'isFollowing': False}, {'_id': '64e6d37e02dee9bcb9d9fa18', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg', 'fullname': 'Csaba Kecskemeti', 'name': 'csabakecskemeti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 69, 'isFollowing': False}]",/posts/mitkox/245649344144062,2519,"{'language': 'en', 'probability': 0.9344876408576965}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/qW3-oKDLFJpue2iS5VjT2.jpeg,134.0,Jason Corkill,jasoncorkill,344641955529637,"[{'type': 'text', 'value': 'We had a few people asking about the differences and methodologies of our addition to the open-image-preferences dataset. 
So my colleague and I wrote a blog post about it with the new huggingface blog functionality: ', 'raw': 'We had a few people asking about the differences and methodologies of our addition to the open-image-preferences dataset. So my colleague and I wrote a blog post about it with the new huggingface blog functionality: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/RapidataAI/more-image-preferences', 'raw': 'https://huggingface.co/blog/RapidataAI/more-image-preferences'}]",We had a few people asking about the differences and methodologies of our addition to the open-image-preferences dataset. So my colleague and I wrote a blog post about it with the new huggingface blog functionality: https://huggingface.co/blog/RapidataAI/more-image-preferences,[],[],"[{'reaction': '👍', 'users': ['victor', 'LucStr', 'SyedAliHusnainGillani', 'John6666'], 'count': 4}]",2025-01-08 10:00:18,2025-01-08 10:00:18.119,[],/posts/jasoncorkill/344641955529637,1629,"{'language': 'en', 'probability': 0.9381260871887207}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64edcaf8b692b67db135919d/AvbAablqqbnL-6VfdH0Rj.png,20.0,David G. R.,reddgr,794715761628699,"[{'type': 'text', 'value': ""Major update on the Talking to Chatbots dataset! Expanded the 'wrapped' dataset (one row per chat) to 2.86k records, and the 'unwrapped' version (one row per conversation turn) to 11k records. The main source is my ChatGPT archive with nearly 2 years of chats. It is still a work in progress as I incorporate chats from other sources and qualitative metrics (SCBN) for responses."", 'raw': ""Major update on the Talking to Chatbots dataset! Expanded the 'wrapped' dataset (one row per chat) to 2.86k records, and the 'unwrapped' version (one row per conversation turn) to 11k records. The main source is my ChatGPT archive with nearly 2 years of chats. It is still a work in progress as I incorporate chats from other sources and qualitative metrics (SCBN) for responses.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'reddgr/talking-to-chatbots-unwrapped-chats'}, 'url': 'https://huggingface.co/datasets/reddgr/talking-to-chatbots-unwrapped-chats', 'raw': 'https://huggingface.co/datasets/reddgr/talking-to-chatbots-unwrapped-chats'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'reddgr/talking-to-chatbots-chats'}, 'url': 'https://huggingface.co/datasets/reddgr/talking-to-chatbots-chats', 'raw': 'https://huggingface.co/datasets/reddgr/talking-to-chatbots-chats'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Major update on the Talking to Chatbots dataset! Expanded the 'wrapped' dataset (one row per chat) to 2.86k records, and the 'unwrapped' version (one row per conversation turn) to 11k records. The main source is my ChatGPT archive with nearly 2 years of chats. It is still a work in progress as I incorporate chats from other sources and qualitative metrics (SCBN) for responses. 
+ +https://huggingface.co/datasets/reddgr/talking-to-chatbots-unwrapped-chats + +https://huggingface.co/datasets/reddgr/talking-to-chatbots-chats + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64edcaf8b692b67db135919d/7HtSY4LlCvrkoKz059hOi.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64edcaf8b692b67db135919d/ht7-v23xf5HSwu0NBN1d4.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'sudanenator', 'victor', 'AtAndDev', 'clem'], 'count': 5}, {'reaction': '🔥', 'users': ['shawnxhwu', 'AtAndDev', 'clem'], 'count': 3}, {'reaction': '🤝', 'users': ['shawnxhwu', 'AtAndDev'], 'count': 2}, {'reaction': '👍', 'users': ['zhanghe', 'AtAndDev'], 'count': 2}, {'reaction': '🚀', 'users': ['NickyNicky', 'AtAndDev'], 'count': 2}]",2025-01-07 23:28:06,2025-01-07 23:33:14.385,[],/posts/reddgr/794715761628699,2366,"{'language': 'en', 'probability': 0.8398400545120239}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62ecdc18b72a69615d6bd857/qAHhWJbSsmoezFHiErBUT.png,434.0,Daniel Han-Chen,danielhanchen,142796005127350,"[{'type': 'text', 'value': 'Deepseek V3, including GGUF + bf16 versions are now uploaded!', 'raw': 'Deepseek V3, including GGUF + bf16 versions are now uploaded!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Includes 2, 3, 4, 5, 6 and 8-bit quantized versions.', 'raw': 'Includes 2, 3, 4, 5, 6 and 8-bit quantized versions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GGUFs: ', 'raw': 'GGUFs: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'unsloth/DeepSeek-V3-GGUF'}, 'url': 'https://huggingface.co/unsloth/DeepSeek-V3-GGUF', 'raw': 'https://huggingface.co/unsloth/DeepSeek-V3-GGUF'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'bf16: ', 'raw': 'bf16: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'unsloth/DeepSeek-V3-bf16'}, 'url': 'https://huggingface.co/unsloth/DeepSeek-V3-bf16', 'raw': 'https://huggingface.co/unsloth/DeepSeek-V3-bf16'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Min. hardware requirements to run: 48GB RAM + 250GB of disk space for 2-bit.', 'raw': 'Min. hardware requirements to run: 48GB RAM + 250GB of disk space for 2-bit.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'See how to run them with examples and the full collection: ', 'raw': 'See how to run them with examples and the full collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'unsloth/deepseek-v3-all-versions-677cf5cfd7df8b7815fc723c'}, 'url': 'https://huggingface.co/collections/unsloth/deepseek-v3-all-versions-677cf5cfd7df8b7815fc723c', 'raw': 'https://huggingface.co/collections/unsloth/deepseek-v3-all-versions-677cf5cfd7df8b7815fc723c'}]","Deepseek V3, including GGUF + bf16 versions are now uploaded! + +Includes 2, 3, 4, 5, 6 and 8-bit quantized versions. + +GGUFs: https://huggingface.co/unsloth/DeepSeek-V3-GGUF +bf16: https://huggingface.co/unsloth/DeepSeek-V3-bf16 + +Min. hardware requirements to run: 48GB RAM + 250GB of disk space for 2-bit. 
+ +See how to run them with examples and the full collection: https://huggingface.co/collections/unsloth/deepseek-v3-all-versions-677cf5cfd7df8b7815fc723c",[],[],"[{'reaction': '🔥', 'users': ['shimmyshimmer', 'etemiz', 'John6666', 'sudanenator', 'victor', 'nmika', 'JackCloudman', 'reonyy'], 'count': 8}, {'reaction': '❤️', 'users': ['shimmyshimmer', 'admarcosai', 'nmika', 'JackCloudman', 'reonyy'], 'count': 5}, {'reaction': '👍', 'users': ['mma666', 'reonyy'], 'count': 2}, {'reaction': '🤗', 'users': ['reddgr'], 'count': 1}]",2025-01-07 21:47:02,2025-05-15 15:21:03.424,"[{'_id': '6638c1488bd9205c327037b7', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/u-N90hmPiVt72nczNO-VI.png', 'fullname': 'Oleg Shulyakov', 'name': 'olegshulyakov', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/danielhanchen/142796005127350,3263,"{'language': 'en', 'probability': 0.7535602450370789}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,998104791603272,"[{'type': 'text', 'value': 'NEW RELEASE: the ', 'raw': 'NEW RELEASE: the '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'sequelbox/Tachibana-QVQ'}, 'url': 'https://huggingface.co/datasets/sequelbox/Tachibana-QVQ', 'raw': 'https://huggingface.co/datasets/sequelbox/Tachibana-QVQ'}, {'type': 'text', 'value': ' dataset is here! Code-reasoning and code-instruct data generated with ', 'raw': ' dataset is here! Code-reasoning and code-instruct data generated with '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Qwen/QVQ-72B-Preview'}, 'url': 'https://huggingface.co/Qwen/QVQ-72B-Preview', 'raw': 'https://huggingface.co/Qwen/QVQ-72B-Preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Come check out QVQ's coding skills!"", 'raw': ""Come check out QVQ's coding skills!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'for everyone to use! ', 'raw': 'for everyone to use! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'more QVQ and Llama 3.1 405b datasets coming soon :)', 'raw': 'more QVQ and Llama 3.1 405b datasets coming soon :)'}]","NEW RELEASE: the https://huggingface.co/datasets/sequelbox/Tachibana-QVQ dataset is here! Code-reasoning and code-instruct data generated with https://huggingface.co/Qwen/QVQ-72B-Preview + +Come check out QVQ's coding skills! + +for everyone to use! 
+ +more QVQ and Llama 3.1 405b datasets coming soon :)",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'victor', 'reonyy'], 'count': 3}]",2025-01-07 20:46:21,2025-01-07 20:46:21.052,[],/posts/sequelbox/998104791603272,1401,"{'language': 'en', 'probability': 0.7246623635292053}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620630b603825909dcbeba35/vNlOtJqdcP3vpAfeHfNvP.jpeg,246.0,Aaron C Wacker,awacke1,909593650912974,"[{'type': 'text', 'value': 'LLMs and LRMs - Logical Reasoning and Chain of Thought.', 'raw': 'LLMs and LRMs - Logical Reasoning and Chain of Thought.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is a read-aloud lecture to answer questions of using language reasoning techniques in advanced AGI style chain of thought AI pipelines. ', 'raw': 'This is a read-aloud lecture to answer questions of using language reasoning techniques in advanced AGI style chain of thought AI pipelines. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Produced using DeepResearchEvaluator located here: ', 'raw': 'Produced using DeepResearchEvaluator located here: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/DeepResearchEvaluator'}, 'url': 'https://huggingface.co/spaces/awacke1/DeepResearchEvaluator', 'raw': 'https://huggingface.co/spaces/awacke1/DeepResearchEvaluator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Videos:', 'raw': 'Videos:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://x.com/Aaron_Wacker/status/1874835790087463063', 'raw': 'https://x.com/Aaron_Wacker/status/1874835790087463063'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=fW_A1hH_7RM', 'raw': 'https://www.youtube.com/watch?v=fW_A1hH_7RM'}, {'type': 'new_line', 'raw': '\n'}]","LLMs and LRMs - Logical Reasoning and Chain of Thought. + +This is a read-aloud lecture to answer questions of using language reasoning techniques in advanced AGI style chain of thought AI pipelines. 
+ +Produced using DeepResearchEvaluator located here: https://huggingface.co/spaces/awacke1/DeepResearchEvaluator + + +Videos: +https://x.com/Aaron_Wacker/status/1874835790087463063 +https://www.youtube.com/watch?v=fW_A1hH_7RM +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/MLfTRAOyTHJyHO4DXs0Qq.png'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'MonsieurMory', 'reonyy', 'whitebill', 'robinsmits', 'den0620', 'Shiraztanvir123', 'AtAndDev'], 'count': 8}]",2025-01-02 15:20:35,2025-01-03 01:33:25.300,"[{'_id': '615ce9a95db25883c685c10d', 'avatarUrl': '/avatars/900e751907c8dc2035e806f727800ef3.svg', 'fullname': 'Samuel Lukudu', 'name': 'samu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/awacke1/909593650912974,2701,"{'language': 'en', 'probability': 0.808256208896637}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg,1593.0,Clelia Astra Bertelli,as-cle-bert,599033181259244,"[{'type': 'text', 'value': '🎉𝐄𝐚𝐫𝐥𝐲 𝐍𝐞𝐰 𝐘𝐞𝐚𝐫 𝐫𝐞𝐥𝐞𝐚𝐬𝐞𝐬🎉', 'raw': '🎉𝐄𝐚𝐫𝐥𝐲 𝐍𝐞𝐰 𝐘𝐞𝐚𝐫 𝐫𝐞𝐥𝐞𝐚𝐬𝐞𝐬🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Hi HuggingFacers🤗, I decided to ship early this year, and here's what I came up with:"", 'raw': ""Hi HuggingFacers🤗, I decided to ship early this year, and here's what I came up with:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐏𝐝𝐟𝐈𝐭𝐃𝐨𝐰𝐧 (', 'raw': '𝐏𝐝𝐟𝐈𝐭𝐃𝐨𝐰𝐧 ('}, {'type': 'link', 'href': 'https://github.com/AstraBert/PdfItDown', 'raw': 'https://github.com/AstraBert/PdfItDown'}, {'type': 'text', 'value': "") - If you're like me, and you have all your RAG pipeline optimized for PDFs, but not for other data formats, here is your solution! With PdfItDown, you can convert Word documents, presentations, HTML pages, markdown sheets and (why not?) CSVs and XMLs in PDF format, for seamless integration with your RAG pipelines. Built upon MarkItDown by Microsoft "", 'raw': "") - If you're like me, and you have all your RAG pipeline optimized for PDFs, but not for other data formats, here is your solution! With PdfItDown, you can convert Word documents, presentations, HTML pages, markdown sheets and (why not?) CSVs and XMLs in PDF format, for seamless integration with your RAG pipelines. 
Built upon MarkItDown by Microsoft ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub Repo 👉 ', 'raw': 'GitHub Repo 👉 '}, {'type': 'link', 'href': 'https://github.com/AstraBert/PdfItDown', 'raw': 'https://github.com/AstraBert/PdfItDown'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PyPi Package 👉 ', 'raw': 'PyPi Package 👉 '}, {'type': 'link', 'href': 'https://pypi.org/project/pdfitdown/', 'raw': 'https://pypi.org/project/pdfitdown/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐒𝐞𝐧𝐓𝐫𝐄𝐯 𝐯𝟏.𝟎.𝟎 (', 'raw': '𝐒𝐞𝐧𝐓𝐫𝐄𝐯 𝐯𝟏.𝟎.𝟎 ('}, {'type': 'link', 'href': 'https://github.com/AstraBert/SenTrEv/tree/v1.0.0', 'raw': 'https://github.com/AstraBert/SenTrEv/tree/v1.0.0'}, {'type': 'text', 'value': ') - If you need to evaluate the 𝗿𝗲𝘁𝗿𝗶𝗲𝘃𝗮𝗹 performance of your 𝘁𝗲𝘅𝘁 𝗲𝗺𝗯𝗲𝗱𝗱𝗶𝗻𝗴 models, I have good news for you🥳🥳 ', 'raw': ') - If you need to evaluate the 𝗿𝗲𝘁𝗿𝗶𝗲𝘃𝗮𝗹 performance of your 𝘁𝗲𝘅𝘁 𝗲𝗺𝗯𝗲𝗱𝗱𝗶𝗻𝗴 models, I have good news for you🥳🥳 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The new release for 𝐒𝐞𝐧𝐓𝐫𝐄𝐯 now supports 𝗱𝗲𝗻𝘀𝗲 and 𝘀𝗽𝗮𝗿𝘀𝗲 retrieval (thanks to FastEmbed by Qdrant) with 𝘁𝗲𝘅𝘁-𝗯𝗮𝘀𝗲𝗱 𝗳𝗶𝗹𝗲 𝗳𝗼𝗿𝗺𝗮𝘁𝘀 (.docx, .pptx, .csv, .html, .xml, .md, .pdf) and new 𝗿𝗲𝗹𝗲𝘃𝗮𝗻𝗰𝗲 𝗺𝗲𝘁𝗿𝗶𝗰𝘀! ', 'raw': 'The new release for 𝐒𝐞𝐧𝐓𝐫𝐄𝐯 now supports 𝗱𝗲𝗻𝘀𝗲 and 𝘀𝗽𝗮𝗿𝘀𝗲 retrieval (thanks to FastEmbed by Qdrant) with 𝘁𝗲𝘅𝘁-𝗯𝗮𝘀𝗲𝗱 𝗳𝗶𝗹𝗲 𝗳𝗼𝗿𝗺𝗮𝘁𝘀 (.docx, .pptx, .csv, .html, .xml, .md, .pdf) and new 𝗿𝗲𝗹𝗲𝘃𝗮𝗻𝗰𝗲 𝗺𝗲𝘁𝗿𝗶𝗰𝘀! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub repo 👉 ', 'raw': 'GitHub repo 👉 '}, {'type': 'link', 'href': 'https://github.com/AstraBert/SenTrEv', 'raw': 'https://github.com/AstraBert/SenTrEv'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Release Notes 👉 ', 'raw': 'Release Notes 👉 '}, {'type': 'link', 'href': 'https://github.com/AstraBert/SenTrEv/releases/tag/v1.0.0', 'raw': 'https://github.com/AstraBert/SenTrEv/releases/tag/v1.0.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PyPi Package 👉 ', 'raw': 'PyPi Package 👉 '}, {'type': 'link', 'href': 'https://pypi.org/project/sentrev/', 'raw': 'https://pypi.org/project/sentrev/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Happy New Year and have fun!🥂', 'raw': 'Happy New Year and have fun!🥂'}]","🎉𝐄𝐚𝐫𝐥𝐲 𝐍𝐞𝐰 𝐘𝐞𝐚𝐫 𝐫𝐞𝐥𝐞𝐚𝐬𝐞𝐬🎉 + +Hi HuggingFacers🤗, I decided to ship early this year, and here's what I came up with: + +𝐏𝐝𝐟𝐈𝐭𝐃𝐨𝐰𝐧 (https://github.com/AstraBert/PdfItDown) - If you're like me, and you have all your RAG pipeline optimized for PDFs, but not for other data formats, here is your solution! With PdfItDown, you can convert Word documents, presentations, HTML pages, markdown sheets and (why not?) CSVs and XMLs in PDF format, for seamless integration with your RAG pipelines. Built upon MarkItDown by Microsoft +GitHub Repo 👉 https://github.com/AstraBert/PdfItDown +PyPi Package 👉 https://pypi.org/project/pdfitdown/ + +𝐒𝐞𝐧𝐓𝐫𝐄𝐯 𝐯𝟏.𝟎.𝟎 (https://github.com/AstraBert/SenTrEv/tree/v1.0.0) - If you need to evaluate the 𝗿𝗲𝘁𝗿𝗶𝗲𝘃𝗮𝗹 performance of your 𝘁𝗲𝘅𝘁 𝗲𝗺𝗯𝗲𝗱𝗱𝗶𝗻𝗴 models, I have good news for you🥳🥳 +The new release for 𝐒𝐞𝐧𝐓𝐫𝐄𝐯 now supports 𝗱𝗲𝗻𝘀𝗲 and 𝘀𝗽𝗮𝗿𝘀𝗲 retrieval (thanks to FastEmbed by Qdrant) with 𝘁𝗲𝘅𝘁-𝗯𝗮𝘀𝗲𝗱 𝗳𝗶𝗹𝗲 𝗳𝗼𝗿𝗺𝗮𝘁𝘀 (.docx, .pptx, .csv, .html, .xml, .md, .pdf) and new 𝗿𝗲𝗹𝗲𝘃𝗮𝗻𝗰𝗲 𝗺𝗲𝘁𝗿𝗶𝗰𝘀! 
+GitHub repo 👉 https://github.com/AstraBert/SenTrEv +Release Notes 👉 https://github.com/AstraBert/SenTrEv/releases/tag/v1.0.0 +PyPi Package 👉 https://pypi.org/project/sentrev/ + +Happy New Year and have fun!🥂",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'clem', 'whitebill', 'Sri-Vigneshwar-DJ'], 'count': 4}, {'reaction': '➕', 'users': ['fuzzy-mittenz'], 'count': 1}]",2025-01-02 09:57:20,2025-01-02 16:17:43.258,"[{'_id': '6764124332338e2e329bae7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/D4ZpXrtUaamP7JJWUSZDr.png', 'fullname': 'ksecurity', 'name': 'ksecurity45', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65e330e7edc2f7306e252448', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg', 'fullname': 'Clelia Astra Bertelli', 'name': 'as-cle-bert', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1593, 'isFollowing': False}]",/posts/as-cle-bert/599033181259244,2094,"{'language': 'en', 'probability': 0.7716745734214783}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64429aaf7feb866811b12f73/LxUVODgksCAlncbvbRGHu.png,73.0,Benjamin Paine,benjamin-paine,319392061336637,"[{'type': 'text', 'value': 'Hello HuggingFace 🤗, and happy new year! 🎆', 'raw': 'Hello HuggingFace 🤗, and happy new year! 🎆'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm thrilled to be releasing the first iteration of a project I've been working on for quite awhile now. It's called Taproot, and it's a seamlessly scalable open-source AI/ML inference engine designed for letting developers build real-time experiences clustered across a small-to-mid-sized cluster, without the burden of hyperscale infrastructure."", 'raw': ""I'm thrilled to be releasing the first iteration of a project I've been working on for quite awhile now. It's called Taproot, and it's a seamlessly scalable open-source AI/ML inference engine designed for letting developers build real-time experiences clustered across a small-to-mid-sized cluster, without the burden of hyperscale infrastructure.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Along with the server and task framework is a client library for node and the browser. And what good is a server and client without an app to go alongside it? To that end, I'm also releasing Anachrovox, a fun, real-time hands-free voice assistant that can run on mid-level devices in <12GB VRAM, with web search, weather, and other tools. It uses my real-time browser wake-word library to detect utterances of the phrase 'Hey Vox', 'Hi Vox', 'Okay Vox', 'Anachrovox' or just 'Vox' (alongside some others.)"", 'raw': ""Along with the server and task framework is a client library for node and the browser. And what good is a server and client without an app to go alongside it? To that end, I'm also releasing Anachrovox, a fun, real-time hands-free voice assistant that can run on mid-level devices in <12GB VRAM, with web search, weather, and other tools. 
It uses my real-time browser wake-word library to detect utterances of the phrase 'Hey Vox', 'Hi Vox', 'Okay Vox', 'Anachrovox' or just 'Vox' (alongside some others.)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Releasing this many things at once will definitely result in bugs, so please report them when sighted! Thank you all!', 'raw': 'Releasing this many things at once will definitely result in bugs, so please report them when sighted! Thank you all!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Taproot: ', 'raw': 'Taproot: '}, {'type': 'link', 'href': 'https://github.com/painebenjamin/taproot', 'raw': 'https://github.com/painebenjamin/taproot'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Taproot JS Client: ', 'raw': 'Taproot JS Client: '}, {'type': 'link', 'href': 'https://github.com/painebenjamin/taproot.js', 'raw': 'https://github.com/painebenjamin/taproot.js'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Anachrovox: ', 'raw': 'Anachrovox: '}, {'type': 'link', 'href': 'https://github.com/painebenjamin/anachrovox', 'raw': 'https://github.com/painebenjamin/anachrovox'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Anachrovox Spaces are networked together, balancing load across them to keep all front-ends responsive. You only have to choose what color you like the most!', 'raw': 'The Anachrovox Spaces are networked together, balancing load across them to keep all front-ends responsive. You only have to choose what color you like the most!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/benjamin-paine/anachrovox', 'raw': 'https://huggingface.co/spaces/benjamin-paine/anachrovox'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/benjamin-paine/anachrovox-amber', 'raw': 'https://huggingface.co/spaces/benjamin-paine/anachrovox-amber'}]","Hello HuggingFace 🤗, and happy new year! 🎆 + +I'm thrilled to be releasing the first iteration of a project I've been working on for quite awhile now. It's called Taproot, and it's a seamlessly scalable open-source AI/ML inference engine designed for letting developers build real-time experiences clustered across a small-to-mid-sized cluster, without the burden of hyperscale infrastructure. + +Along with the server and task framework is a client library for node and the browser. And what good is a server and client without an app to go alongside it? To that end, I'm also releasing Anachrovox, a fun, real-time hands-free voice assistant that can run on mid-level devices in <12GB VRAM, with web search, weather, and other tools. It uses my real-time browser wake-word library to detect utterances of the phrase 'Hey Vox', 'Hi Vox', 'Okay Vox', 'Anachrovox' or just 'Vox' (alongside some others.) + +Releasing this many things at once will definitely result in bugs, so please report them when sighted! Thank you all! + +Taproot: https://github.com/painebenjamin/taproot +Taproot JS Client: https://github.com/painebenjamin/taproot.js +Anachrovox: https://github.com/painebenjamin/anachrovox + +The Anachrovox Spaces are networked together, balancing load across them to keep all front-ends responsive. You only have to choose what color you like the most! 
+ +https://huggingface.co/spaces/benjamin-paine/anachrovox +https://huggingface.co/spaces/benjamin-paine/anachrovox-amber","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64429aaf7feb866811b12f73/gInv8ezWCcSp9rU1868nI.gif'}]",[],"[{'reaction': '👍', 'users': ['Sol1986', 'John6666', 'rbgo', 'bhaskartripathi', 'Emsallam'], 'count': 5}, {'reaction': '🔥', 'users': ['John6666'], 'count': 1}, {'reaction': '❤️', 'users': ['bhaskartripathi'], 'count': 1}]",2025-01-02 01:20:36,2025-01-05 04:02:34.659,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '64429aaf7feb866811b12f73', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64429aaf7feb866811b12f73/LxUVODgksCAlncbvbRGHu.png', 'fullname': 'Benjamin Paine', 'name': 'benjamin-paine', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 73, 'isFollowing': False}]",/posts/benjamin-paine/319392061336637,3293,"{'language': 'en', 'probability': 0.8724250197410583}",12 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,930581853285482,"[{'type': 'text', 'value': 'hey nvidia, can you send me a gpu?', 'raw': 'hey nvidia, can you send me a gpu?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'comment or react if you want ~~me~~ to get one too. 👉👈', 'raw': 'comment or react if you want ~~me~~ to get one too. 👉👈'}]","hey nvidia, can you send me a gpu? +comment or react if you want ~~me~~ to get one too. 
👉👈",[],[],"[{'reaction': '➕', 'users': ['pepper13', 'jacksonavila', 'bfletcher', 'cmontg', 'hdickerson', 'lamaraguilar', 'merlinherman', 'timmylai', 'wilsoneaton', 'as-cle-bert', 'reonyy', 'p3nGu1nZz', 'song376789', 'michaelyliu6', 'rahul-rokkun', 'wwkmind', 'berkaycamur', 'wolfspyre', 'XuehangCang', 'rupaak', 'iojvsuynv', 'ai-everyday', 'acelyc111', 'amacca17', 'JouharCheleri', 'tolgadev', 'stefan-it', 'fabiofernandes', 'LeonidasSTEM', 'cnmoro', 'shankars', 'Erno788622', 'TRV', 'lroggendorff', 'majid68', 'Ainonake', 'YaTharThShaRma999', 'Sal-ONE', 'qingy2024', 'not-lain', 'AtAndDev', 'Josephgflowers', 'CocoRoF', 'celsowm', 'likaixin', 'Smiley0707', 'Shashwath01', 'clem', 'zakariyamansuri1', 'nroggendorff'], 'count': 50}, {'reaction': '🔥', 'users': ['rahul-rokkun', 'majid68', 'YaTharThShaRma999', 'AtAndDev', 'disham993', 'thanhnx12', 'clem', 'kozak23', 'k33kx', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 18}, {'reaction': '😎', 'users': ['John6666', 'rahul-rokkun', 'fabiofernandes', 'majid68', 'YaTharThShaRma999', 'AtAndDev', 'thanhnx12', 'clem', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 17}, {'reaction': '🚀', 'users': ['rahul-rokkun', 'majid68', 'YaTharThShaRma999', 'AtAndDev', 'akahana', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 15}, {'reaction': '👀', 'users': ['rahul-rokkun', 'majid68', 'YaTharThShaRma999', 'AtAndDev', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 14}, {'reaction': '🤗', 'users': ['rahul-rokkun', 'YaTharThShaRma999', 'CocoRoF', 'celsowm', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 14}, {'reaction': '❤️', 'users': ['rahul-rokkun', 'YaTharThShaRma999', 'AtAndDev', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 13}, {'reaction': '🧠', 'users': ['rahul-rokkun', 'YaTharThShaRma999', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 12}, {'reaction': '���', 'users': ['rahul-rokkun', 'YaTharThShaRma999', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 12}, {'reaction': '🤝', 'users': ['rahul-rokkun', 'YaTharThShaRma999', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 12}, {'reaction': '😔', 'users': ['rahul-rokkun', 'YaTharThShaRma999', 'thanhnx12', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman'], 'count': 11}, {'reaction': '🤯', 'users': ['rahul-rokkun', 'YaTharThShaRma999', 'pepper13', 'wilsoneaton', 'bfletcher', 'cmontg', 'hdickerson', 'jacksonavila', 'lamaraguilar', 'merlinherman', 'nroggendorff'], 'count': 11}]",2025-01-02 00:18:34,2025-01-18 14:49:21.542,"[{'_id': '659f000b83abded48e190901', 'avatarUrl': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}, {'_id': '66aa9040c14f47b2b6c296e6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66aa9040c14f47b2b6c296e6/EFIZoCxgh45nY5twyAdhq.jpeg', 'fullname': 'dont care', 'name': 'dont-care', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '66d78facde54fea8a009927e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/9FKxA-hg9xm2oTFuIqgt3.png', 'fullname': 'Qingyun Li', 'name': 'qingy2024', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 62, 'isFollowing': False}, {'_id': '661d5c4c9e04cdf1869188fa', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/UDMzh0GmPEJLIdF6JTPq3.png', 'fullname': 'Gibson Pilconis', 'name': 'gibsonpil', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}, {'_id': '6127253029bcc4a9c4f45b7f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1637584282147-6127253029bcc4a9c4f45b7f.jpeg', 'fullname': 'fahrizalfarid', 'name': 'akahana', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6, 'isFollowing': False}, {'_id': '66f51c451ccc6405fce5f37e', 'avatarUrl': '/avatars/1feb258ad8eba68e49ab08f2458881b2.svg', 'fullname': 'Zakariya Mansuri', 'name': 'zakariyamansuri1', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6580862babafd960c82ed31a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6580862babafd960c82ed31a/ABP_U3y_nISbDGqCHwXa2.png', 'fullname': 'Chris', 'name': 'WesPro', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 28, 'isFollowing': False}, {'_id': '66fe8fb27d722f0879b4631f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66fe8fb27d722f0879b4631f/-ZnoJVaxeGVicX1GpjizK.jpeg', 'fullname': 'Sk md saad amin', 'name': 'Reality123b', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}]",/posts/nroggendorff/930581853285482,6356,"{'language': 'en', 'probability': 0.8421728014945984}",33 +/avatars/a692e2e2a3b0222e2f8cdfc44ac8d64c.svg,29.0,its5Q,its5Q,217014225307389,"[{'type': 'text', 'value': ""Am I missing something, or there is still no way to filter by model size while searching for models? It has been a requested feature since 2022, but I haven't seen any updates since! 
With the amount of different models coming out, I think the size filter would be a great extension of the search functionality, especially when looking for smaller models, which are a lot less prevalent."", 'raw': ""Am I missing something, or there is still no way to filter by model size while searching for models? It has been a requested feature since 2022, but I haven't seen any updates since! With the amount of different models coming out, I think the size filter would be a great extension of the search functionality, especially when looking for smaller models, which are a lot less prevalent.""}]","Am I missing something, or there is still no way to filter by model size while searching for models? It has been a requested feature since 2022, but I haven't seen any updates since! With the amount of different models coming out, I think the size filter would be a great extension of the search functionality, especially when looking for smaller models, which are a lot less prevalent.",[],[],"[{'reaction': '🤯', 'users': ['reonyy', 'John6666', 'YaTharThShaRma999', 'disham993', 'khursani8', 'TahirC', 'AtAndDev', 'travisking'], 'count': 8}, {'reaction': '👍', 'users': ['puzzledust'], 'count': 1}]",2025-01-01 19:30:50,2025-01-02 17:30:42.553,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/its5Q/217014225307389,3082,"{'language': 'en', 'probability': 0.9666996598243713}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/645dbaa6f5760d1530d7580d/Bqob8arLZoHIgMwNZpL9I.jpeg,39.0,Simeon Emanuilov,s-emanuilov,658045857403337,"[{'type': 'text', 'value': 'Hey HF community! 👋 ', 'raw': 'Hey HF community! 
👋 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Excited to share Monkt - a tool I built to solve the eternal headache of processing documents for ML/AI pipelines.', 'raw': 'Excited to share Monkt - a tool I built to solve the eternal headache of processing documents for ML/AI pipelines.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What it does: Converts PDFs, Word, PowerPoint, Excel, Web pages or raw HTML into clean Markdown or structured JSON.', 'raw': 'What it does: Converts PDFs, Word, PowerPoint, Excel, Web pages or raw HTML into clean Markdown or structured JSON.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Great for:', 'raw': 'Great for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔ LLM training dataset preparation;', 'raw': '✔ LLM training dataset preparation;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔ Knowledge base construction;', 'raw': '✔ Knowledge base construction;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔ Research paper processing;', 'raw': '✔ Research paper processing;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔ Technical documentation management.', 'raw': '✔ Technical documentation management.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It has API access for integration into ML pipelines.', 'raw': 'It has API access for integration into ML pipelines.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out at ', 'raw': 'Check it out at '}, {'type': 'link', 'href': 'https://monkt.com/', 'raw': 'https://monkt.com/'}, {'type': 'text', 'value': ' if you want to save time on document processing infrastructure.', 'raw': ' if you want to save time on document processing infrastructure.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Looking forward to your feedback! ', 'raw': 'Looking forward to your feedback! '}]","Hey HF community! 👋 + +Excited to share Monkt - a tool I built to solve the eternal headache of processing documents for ML/AI pipelines. + +What it does: Converts PDFs, Word, PowerPoint, Excel, Web pages or raw HTML into clean Markdown or structured JSON. + +Great for: +✔ LLM training dataset preparation; +✔ Knowledge base construction; +✔ Research paper processing; +✔ Technical documentation management. + +It has API access for integration into ML pipelines. + +Check it out at https://monkt.com/ if you want to save time on document processing infrastructure. + +Looking forward to your feedback! 
",[],[],"[{'reaction': '👀', 'users': ['John6666', 'reonyy', 'Zilikon', 'csabakecskemeti', 'rishavkundu', 'Martins6'], 'count': 6}, {'reaction': '🤝', 'users': ['pritamdeb68', 'Martins6'], 'count': 2}, {'reaction': '➕', 'users': ['TRV', 'Martins6'], 'count': 2}, {'reaction': '👍', 'users': ['csabakecskemeti', 'Martins6'], 'count': 2}, {'reaction': '😎', 'users': ['Trevo01', 'Martins6'], 'count': 2}]",2025-01-01 16:21:45,2025-01-02 14:34:21.664,"[{'_id': '645dbaa6f5760d1530d7580d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/645dbaa6f5760d1530d7580d/Bqob8arLZoHIgMwNZpL9I.jpeg', 'fullname': 'Simeon Emanuilov', 'name': 's-emanuilov', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 39, 'isFollowing': False}, {'_id': '6764124332338e2e329bae7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/D4ZpXrtUaamP7JJWUSZDr.png', 'fullname': 'ksecurity', 'name': 'ksecurity45', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/s-emanuilov/658045857403337,2582,"{'language': 'en', 'probability': 0.8036255240440369}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png,5900.0,Joshua,Xenova,481580539559527,"[{'type': 'text', 'value': 'First project of 2025: Vision Transformer Explorer', 'raw': 'First project of 2025: Vision Transformer Explorer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I built a web app to interactively explore the self-attention maps produced by ViTs. This explains what the model is focusing on when making predictions, and provides insights into its inner workings! 🤯', 'raw': 'I built a web app to interactively explore the self-attention maps produced by ViTs. This explains what the model is focusing on when making predictions, and provides insights into its inner workings! 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out yourself! 👇', 'raw': 'Try it out yourself! 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'webml-community/attention-visualization'}, 'url': 'https://huggingface.co/spaces/webml-community/attention-visualization', 'raw': 'https://huggingface.co/spaces/webml-community/attention-visualization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Source code: ', 'raw': 'Source code: '}, {'type': 'link', 'href': 'https://github.com/huggingface/transformers.js-examples/tree/main/attention-visualization', 'raw': 'https://github.com/huggingface/transformers.js-examples/tree/main/attention-visualization'}]","First project of 2025: Vision Transformer Explorer + +I built a web app to interactively explore the self-attention maps produced by ViTs. This explains what the model is focusing on when making predictions, and provides insights into its inner workings! 🤯 + +Try it out yourself! 
👇 +https://huggingface.co/spaces/webml-community/attention-visualization + +Source code: https://github.com/huggingface/transformers.js-examples/tree/main/attention-visualization","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61b253b7ac5ecaae3d1efe0c/hYtJ0qBY-utRcnonSjVwY.mp4'}]",[],"[{'reaction': '🔥', 'users': ['Kiruse', 'mtwohey2', 'Yasintuncer', 'John6666', 'darkvplayer', 'iky1e', 'reonyy', 'Zilikon', 'joseph-bou', 'Nn967771121118', 'prithivMLmods', 'JoPmt', 'kramp', 'FranckAbgrall', 'vikash0837', 'treek2345', 'clem', 'disham993', 'nmstoker', 'Snagy22000', 'byteprobe', 'massimoavvisati', 'Khushiyon', 'ChetanSaifsAi', 'Rustammm', 'agocorona'], 'count': 26}, {'reaction': '👍', 'users': ['vaibhavbrahme', 'CocoRoF', 'clem', 'Rustammm'], 'count': 4}, {'reaction': '👀', 'users': ['Rustammm'], 'count': 1}]",2025-01-01 15:54:24,2025-01-01 15:54:24.635,[],/posts/Xenova/481580539559527,8526,"{'language': 'en', 'probability': 0.8533672094345093}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/i8Xmex143RtZ2GJTasUxa.jpeg,20.0,tom,roseking,844242885214869,"[{'type': 'text', 'value': '🤗 Hugging Face Download Tool', 'raw': '🤗 Hugging Face Download Tool'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Hugging Face Download Tool is a sophisticated graphical user interface application designed to simplify the process of downloading resources from Hugging Face repositories. This tool addresses common challenges in model and file downloads through its intelligent features and user-friendly interface.', 'raw': 'The Hugging Face Download Tool is a sophisticated graphical user interface application designed to simplify the process of downloading resources from Hugging Face repositories. 
This tool addresses common challenges in model and file downloads through its intelligent features and user-friendly interface.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ✨ Key Features', 'raw': ' ✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🖥️ Intuitive graphical interface for easy operation', 'raw': '- 🖥️ Intuitive graphical interface for easy operation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🔄 Advanced retry mechanism with smart error handling', 'raw': '- 🔄 Advanced retry mechanism with smart error handling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ⏸️ Resume capability for interrupted downloads', 'raw': '- ⏸️ Resume capability for interrupted downloads'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 📊 Real-time download status monitoring', 'raw': '- 📊 Real-time download status monitoring'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🔐 Secure access to private repositories via token authentication', 'raw': '- 🔐 Secure access to private repositories via token authentication'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ Technical Highlights', 'raw': '🛠️ Technical Highlights'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The tool implements several advanced features to ensure reliable downloads:', 'raw': 'The tool implements several advanced features to ensure reliable downloads:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 📦 Chunk-based downloading with 1MB segments', 'raw': '- 📦 Chunk-based downloading with 1MB segments'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ⚡ Adaptive retry intervals (5-300 seconds) based on error types', 'raw': '- ⚡ Adaptive retry intervals (5-300 seconds) based on error types'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🔌 Connection pooling for optimized performance', 'raw': '- 🔌 Connection pooling for optimized performance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🛡️ Built-in rate limiting protection', 'raw': '- 🛡️ Built-in rate limiting protection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🔑 Secure token handling for private repository access', 'raw': '- 🔑 Secure token handling for private repository access'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This tool is ideal for researchers, developers, and AI practitioners who regularly work with Hugging Face resources and need a reliable, user-friendly download solution. 💻 It supports all major operating systems and requires minimal setup, making it accessible to users of all technical levels. 🚀', 'raw': 'This tool is ideal for researchers, developers, and AI practitioners who regularly work with Hugging Face resources and need a reliable, user-friendly download solution. 💻 It supports all major operating systems and requires minimal setup, making it accessible to users of all technical levels. 
🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub:https://github.com/2404589803/hf_downloader', 'raw': 'GitHub:https://github.com/2404589803/hf_downloader'}, {'type': 'new_line', 'raw': '\n'}]","🤗 Hugging Face Download Tool + +The Hugging Face Download Tool is a sophisticated graphical user interface application designed to simplify the process of downloading resources from Hugging Face repositories. This tool addresses common challenges in model and file downloads through its intelligent features and user-friendly interface. + + ✨ Key Features +- 🖥️ Intuitive graphical interface for easy operation +- 🔄 Advanced retry mechanism with smart error handling +- ⏸️ Resume capability for interrupted downloads +- 📊 Real-time download status monitoring +- 🔐 Secure access to private repositories via token authentication + +🛠️ Technical Highlights +The tool implements several advanced features to ensure reliable downloads: +- 📦 Chunk-based downloading with 1MB segments +- ⚡ Adaptive retry intervals (5-300 seconds) based on error types +- 🔌 Connection pooling for optimized performance +- 🛡️ Built-in rate limiting protection +- 🔑 Secure token handling for private repository access + +This tool is ideal for researchers, developers, and AI practitioners who regularly work with Hugging Face resources and need a reliable, user-friendly download solution. 💻 It supports all major operating systems and requires minimal setup, making it accessible to users of all technical levels. 🚀 + +GitHub:https://github.com/2404589803/hf_downloader +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/649e263c402ad391e613ab3d/c5bAWnBZPUjLAekiGUvZm.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'reonyy', 'aust-t', 'daqc', 'ZeonLap', 'ai-everyday', 'AdinaY', 'Nymbo'], 'count': 8}, {'reaction': '🔥', 'users': ['gustavoia2023', 'AdinaY', 'madalone'], 'count': 3}, {'reaction': '😎', 'users': ['Sandiswiss1989', 'roseking', 'AdinaY'], 'count': 3}, {'reaction': '👍', 'users': ['adeebDkheel'], 'count': 1}]",2025-01-01 07:53:14,2025-01-05 03:48:02.627,"[{'_id': '6775388eb626c24f2027b608', 'avatarUrl': '/avatars/2b1a6b480cad1c7545283f5200dbf538.svg', 'fullname': 'Rakib Ahmed', 'name': 'rakibbd72', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '649e263c402ad391e613ab3d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/i8Xmex143RtZ2GJTasUxa.jpeg', 'fullname': 'tom', 'name': 'roseking', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 20, 'isFollowing': False}]",/posts/roseking/844242885214869,2725,"{'language': 'en', 'probability': 0.8430689573287964}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png,1048.0,Hexgrad,hexgrad,550329200722441,"[{'type': 'text', 'value': 'Happy New Year! 🌃 ', 'raw': 'Happy New Year! 
🌃 '}, {'type': 'inline_code', 'code': 'af_sky', 'raw': '`af_sky`'}, {'type': 'text', 'value': ' landed in Kokoro, along with an article: ', 'raw': ' landed in Kokoro, along with an article: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'hexgrad/Kokoro-82M'}, 'url': 'https://hf.co/hexgrad/Kokoro-82M/blob/main/demo/restoring-sky.md', 'raw': 'https://hf.co/hexgrad/Kokoro-82M/blob/main/demo/restoring-sky.md'}]","Happy New Year! 🌃 `af_sky` landed in Kokoro, along with an article: https://hf.co/hexgrad/Kokoro-82M/blob/main/demo/restoring-sky.md",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'YaTharThShaRma999', 'JLouisBiz', 'linz'], 'count': 4}]",2025-01-01 04:11:31,2025-01-02 17:37:28.588,"[{'_id': '6629552c96f529a39bac7c89', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png', 'fullname': 'Hexgrad', 'name': 'hexgrad', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1048, 'isFollowing': False}, {'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/hexgrad/550329200722441,8921,"{'language': 'en', 'probability': 0.7124412059783936}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg,69.0,Csaba Kecskemeti,csabakecskemeti,625183009398336,"[{'type': 'text', 'value': 'Happy New Year, Huggingface community!', 'raw': 'Happy New Year, Huggingface community!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In 2025, I'll continue my quantization (and some fine-tuning) efforts to support the open-source AI and Make knowledge free for everyone."", 'raw': ""In 2025, I'll continue my quantization (and some fine-tuning) efforts to support the open-source AI and Make knowledge free for everyone.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'DevQuasar'}, 'url': 'https://huggingface.co/DevQuasar', 'raw': 'https://huggingface.co/DevQuasar', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/o_HhUnXb_PgyYlqJ6gfEO.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://devquasar.com/', 'raw': 'https://devquasar.com/'}]","Happy New Year, Huggingface community! +In 2025, I'll continue my quantization (and some fine-tuning) efforts to support the open-source AI and Make knowledge free for everyone. 
+ +https://huggingface.co/DevQuasar +https://devquasar.com/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e6d37e02dee9bcb9d9fa18/_x0ZLzzLW2M27lAQTL5li.webp'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'HHorne88', 'JLouisBiz', 'brando-slc', 'clem'], 'count': 5}, {'reaction': '❤️', 'users': ['fuzzy-mittenz', 'TRV', 'clem'], 'count': 3}]",2025-01-01 02:20:51,2025-01-02 17:41:10.960,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/csabakecskemeti/625183009398336,1555,"{'language': 'en', 'probability': 0.8138210773468018}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,918308922536711,"[{'type': 'text', 'value': 'The Chinese community is shipping 🚢 ', 'raw': 'The Chinese community is shipping 🚢 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'DeepSeek V3 (685 B MoE) has quietly released on the hub! ', 'raw': 'DeepSeek V3 (685 B MoE) has quietly released on the hub! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Base: ', 'raw': 'Base: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'deepseek-ai/DeepSeek-V3-Base'}, 'url': 'https://huggingface.co/deepseek-ai/DeepSeek-V3-Base', 'raw': 'https://huggingface.co/deepseek-ai/DeepSeek-V3-Base'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instruct: ', 'raw': 'Instruct: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'deepseek-ai/DeepSeek-V3'}, 'url': 'https://huggingface.co/deepseek-ai/DeepSeek-V3', 'raw': 'https://huggingface.co/deepseek-ai/DeepSeek-V3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Can’t wait to see what’s next! ', 'raw': 'Can’t wait to see what’s next! '}]","The Chinese community is shipping 🚢 + +DeepSeek V3 (685 B MoE) has quietly released on the hub! +Base: https://huggingface.co/deepseek-ai/DeepSeek-V3-Base +Instruct: https://huggingface.co/deepseek-ai/DeepSeek-V3 + +Can’t wait to see what’s next! 
","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63a369d98c0c89dcae3b8329/RyTpm6JAlKiHx1K-Zov9p.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['Nielly', 'darkzbaron', 'AtAndDev', 'reonyy', 'Aurelien-Morgan', 'John6666', 'Blucky', 'coderfpv', 'dillfrescott', 'suayptalha', 'OSO-AI', 'victor', 'strangetoad'], 'count': 13}, {'reaction': '🚀', 'users': ['prithivMLmods', 'reonyy', 'dillfrescott', 'AtAndDev', 'victor', 'strangetoad', 'ajcs'], 'count': 7}, {'reaction': '👍', 'users': ['dillfrescott', 'AtAndDev', 'arashiun'], 'count': 3}, {'reaction': '❤️', 'users': ['dillfrescott', 'AtAndDev'], 'count': 2}, {'reaction': '🤗', 'users': ['dillfrescott', 'AtAndDev'], 'count': 2}]",2024-12-26 08:59:58,2024-12-27 16:54:12.585,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/AdinaY/918308922536711,3636,"{'language': 'en', 'probability': 0.7601616978645325}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png,1048.0,Hexgrad,hexgrad,589907361070461,"[{'type': 'text', 'value': 'Merry Christmas! 🎄 Open sourced a small TTS model at ', 'raw': 'Merry Christmas! 🎄 Open sourced a small TTS model at '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'hexgrad/Kokoro-82M'}, 'url': 'https://huggingface.co/hexgrad/Kokoro-82M', 'raw': 'https://huggingface.co/hexgrad/Kokoro-82M'}]",Merry Christmas! 🎄 Open sourced a small TTS model at https://huggingface.co/hexgrad/Kokoro-82M,[],[],"[{'reaction': '🔥', 'users': ['danlund', 'ozguntosun', 'John6666', 'ecyht2', 'gjyotin305', 'kristaller486', 'bendangelo', 'reonyy', 'Pendrokar', 'do-me', 'britny', 'AtAndDev', 'g-ronimo', 'YaTharThShaRma999', 's0me-0ne', 'nicolay-r', 'victor', 'fireblade2534', 'linz'], 'count': 19}, {'reaction': '❤️', 'users': ['Harbous', 'JLouisBiz', 'Wismut', 'fireblade2534', 'linz'], 'count': 5}]",2024-12-26 00:49:15,2024-12-27 17:06:10.367,"[{'_id': '676d052d628bc826d1c09cc6', 'avatarUrl': '/avatars/a94810f1245c0b13b35bba244296a6b8.svg', 'fullname': 'Kareem', 'name': 'Dxcanada2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/hexgrad/589907361070461,4081,"{'language': 'en', 'probability': 0.6597874164581299}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/63c09b32dd793d5a62895a95/SFdGQeiZpD5oxkl66wK2u.jpeg,48.0,Duskfall Crew,Duskfallcrew,227563947266760,"[{'type': 'text', 'value': 'Just finally ... added my HF backup tool to an HF repo... after two years roughly of making this - ', 'raw': 'Just finally ... added my HF backup tool to an HF repo... 
after two years roughly of making this - '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's inspired KINDA by Camenduru but his one stopped working and I wish I had the original code for it so I could reformat the A111 extension he had..."", 'raw': ""It's inspired KINDA by Camenduru but his one stopped working and I wish I had the original code for it so I could reformat the A111 extension he had...""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Because I'm TRYING to make an A111 extension and maybe a custom comfyUI node:"", 'raw': ""Because I'm TRYING to make an A111 extension and maybe a custom comfyUI node:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Duskfallcrew/Huggingface_Backup'}, 'url': 'https://huggingface.co/Duskfallcrew/Huggingface_Backup', 'raw': 'https://huggingface.co/Duskfallcrew/Huggingface_Backup'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I originally patched this from Everydream2trainer and some other stuff. ', 'raw': 'I originally patched this from Everydream2trainer and some other stuff. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So the credits stay.', 'raw': 'So the credits stay.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm amazed at this lol."", 'raw': ""I'm amazed at this lol.""}]","Just finally ... added my HF backup tool to an HF repo... after two years roughly of making this - +It's inspired KINDA by Camenduru but his one stopped working and I wish I had the original code for it so I could reformat the A111 extension he had... + +Because I'm TRYING to make an A111 extension and maybe a custom comfyUI node: + +https://huggingface.co/Duskfallcrew/Huggingface_Backup + +I originally patched this from Everydream2trainer and some other stuff. +So the credits stay. 
+ +I'm amazed at this lol.",[],[],"[{'reaction': '👍', 'users': ['John6666', 'nicolay-r', 'quantflex'], 'count': 3}, {'reaction': '🚀', 'users': ['John6666', 'quantflex'], 'count': 2}, {'reaction': '❤️', 'users': ['quantflex'], 'count': 1}]",2024-12-25 23:49:33,2024-12-28 05:39:00.448,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '63c09b32dd793d5a62895a95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63c09b32dd793d5a62895a95/SFdGQeiZpD5oxkl66wK2u.jpeg', 'fullname': 'Duskfall Crew', 'name': 'Duskfallcrew', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}]",/posts/Duskfallcrew/227563947266760,1084,"{'language': 'en', 'probability': 0.947741687297821}",8 +https://cdn-avatars.huggingface.co/v1/production/uploads/644ba00b0fbe4830f192f442/BrcDc4ngokSApFELEu357.jpeg,46.0,"Nitya Narasimhan, PhD",nityan,833958149002296,"[{'type': 'text', 'value': '#001 | A journey into open-source Hugging Face Models on Azure AI', 'raw': '#001 | A journey into open-source Hugging Face Models on Azure AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'December is the month for New Year resolutions - and this year I am determined to write more on Hugging Face. I kept putting this off thinking I wanted to have time to craft perfect long-form articles, but then I discovered we can do quick posts. So why wait till January?', 'raw': 'December is the month for New Year resolutions - and this year I am determined to write more on Hugging Face. I kept putting this off thinking I wanted to have time to craft perfect long-form articles, but then I discovered we can do quick posts. So why wait till January?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I am a PhD, a Polyglot, a Parent, a Visual Storyteller, a Community Builder - and an AI Advocate at Microsoft. However, if I look back on my 25+ years in tech, what I love most is to help people learn by making complex concepts feel more accessible and actionable regardless of your background or expertise. And in 2025, I want to use a #NityaLearnsAI tagline as a way to share my learning journey, explore the vast space of AI tools and technologies, amplify our open-source community and put the fun back in fundamentals. I hope you find it useful and will join me!', 'raw': 'I am a PhD, a Polyglot, a Parent, a Visual Storyteller, a Community Builder - and an AI Advocate at Microsoft. However, if I look back on my 25+ years in tech, what I love most is to help people learn by making complex concepts feel more accessible and actionable regardless of your background or expertise. And in 2025, I want to use a #NityaLearnsAI tagline as a way to share my learning journey, explore the vast space of AI tools and technologies, amplify our open-source community and put the fun back in fundamentals. 
I hope you find it useful and will join me!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My first post is on this Microsoft Ignite theater session delivered in Nov:', 'raw': 'My first post is on this Microsoft Ignite theater session delivered in Nov:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://ignite.microsoft.com/en-US/sessions/THR502?source=sessions', 'raw': 'https://ignite.microsoft.com/en-US/sessions/THR502?source=sessions'}, {'type': 'text', 'value': ' It was not recorded but can find the slides here: ', 'raw': ' It was not recorded but can find the slides here: '}, {'type': 'link', 'href': 'https://speakerdeck.com/nitya/thr502-journey-into-open-source-hugging-face-models-on-azure-ai', 'raw': 'https://speakerdeck.com/nitya/thr502-journey-into-open-source-hugging-face-models-on-azure-ai'}, {'type': 'text', 'value': ' - and the illustrated guide attached below summarizes the talk in one big picture.', 'raw': ' - and the illustrated guide attached below summarizes the talk in one big picture.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'At the core, this is about my growing interest in **Model Choice** and learning more about not just frontier models but the much larger ecosystem of open-source variants and the community creators who build them. See:', 'raw': 'At the core, this is about my growing interest in **Model Choice** and learning more about not just frontier models but the much larger ecosystem of open-source variants and the community creators who build them. See:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Oct / The Future of AI is model choice / ', 'raw': '1. Oct / The Future of AI is model choice / '}, {'type': 'link', 'href': 'https://techcommunity.microsoft.com/blog/aiplatformblog/the-future-of-ai-is-model-choice---from-structured-process-to-seamless-platform/4284091', 'raw': 'https://techcommunity.microsoft.com/blog/aiplatformblog/the-future-of-ai-is-model-choice---from-structured-process-to-seamless-platform/4284091'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Sep / HF Models Recap / ', 'raw': '2. Sep / HF Models Recap / '}, {'type': 'link', 'href': 'https://techcommunity.microsoft.com/blog/aiplatformblog/new-hugging-face-models-on-azure-ai-phi-3-variants-from', 'raw': 'https://techcommunity.microsoft.com/blog/aiplatformblog/new-hugging-face-models-on-azure-ai-phi-3-variants-from'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Aug / HF Models Recap / ', 'raw': '3. Aug / HF Models Recap / '}, {'type': 'link', 'href': 'https://techcommunity.microsoft.com/blog/aiplatformblog/new-hugging-face-models-on-azure-ai-multilingual-slm-and-biomed--july-2024-updat/4211881', 'raw': 'https://techcommunity.microsoft.com/blog/aiplatformblog/new-hugging-face-models-on-azure-ai-multilingual-slm-and-biomed--july-2024-updat/4211881'}]","#001 | A journey into open-source Hugging Face Models on Azure AI + +December is the month for New Year resolutions - and this year I am determined to write more on Hugging Face. I kept putting this off thinking I wanted to have time to craft perfect long-form articles, but then I discovered we can do quick posts. So why wait till January? + +I am a PhD, a Polyglot, a Parent, a Visual Storyteller, a Community Builder - and an AI Advocate at Microsoft. 
However, if I look back on my 25+ years in tech, what I love most is to help people learn by making complex concepts feel more accessible and actionable regardless of your background or expertise. And in 2025, I want to use a #NityaLearnsAI tagline as a way to share my learning journey, explore the vast space of AI tools and technologies, amplify our open-source community and put the fun back in fundamentals. I hope you find it useful and will join me! + +My first post is on this Microsoft Ignite theater session delivered in Nov: +https://ignite.microsoft.com/en-US/sessions/THR502?source=sessions It was not recorded but can find the slides here: https://speakerdeck.com/nitya/thr502-journey-into-open-source-hugging-face-models-on-azure-ai - and the illustrated guide attached below summarizes the talk in one big picture. + +At the core, this is about my growing interest in **Model Choice** and learning more about not just frontier models but the much larger ecosystem of open-source variants and the community creators who build them. See: + +1. Oct / The Future of AI is model choice / https://techcommunity.microsoft.com/blog/aiplatformblog/the-future-of-ai-is-model-choice---from-structured-process-to-seamless-platform/4284091 +2. Sep / HF Models Recap / https://techcommunity.microsoft.com/blog/aiplatformblog/new-hugging-face-models-on-azure-ai-phi-3-variants-from +3. Aug / HF Models Recap / https://techcommunity.microsoft.com/blog/aiplatformblog/new-hugging-face-models-on-azure-ai-multilingual-slm-and-biomed--july-2024-updat/4211881","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/644ba00b0fbe4830f192f442/WEgyt1J52JgGvMDnO_YjN.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'nicolay-r'], 'count': 2}, {'reaction': '👍', 'users': ['sfurry'], 'count': 1}]",2024-12-25 17:07:09,2024-12-27 17:04:25.543,"[{'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/nityan/833958149002296,1787,"{'language': 'en', 'probability': 0.863193929195404}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,188121237623202,"[{'type': 'text', 'value': "" 📢 If you're aimed at quick experiment with LLM and known Chain-of-Thought (CoT) / prompt schema with no-string dependencies, then I have something relevant for you to share 💎"", 'raw': "" 📢 If you're aimed at quick experiment with LLM and known Chain-of-Thought (CoT) / prompt schema with no-string dependencies, then I have something relevant for you to share 💎""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So far I released the updated version 📦 bulk-chain-0.25.0 📦, which is aimed at bringing accessible API for an instant LLM application towards massive data iterators using via predefined prompt schema 🎊', 'raw': 'So far I released the updated version 📦 bulk-chain-0.25.0 📦, which is aimed at bringing accessible API for an instant LLM application towards massive data iterators using via predefined prompt schema 🎊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📦: ', 'raw': '📦: '}, {'type': 'link', 'href': 'https://pypi.org/project/bulk-chain/0.25.0/', 'raw': 
'https://pypi.org/project/bulk-chain/0.25.0/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟: ', 'raw': '🌟: '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/bulk-chain', 'raw': 'https://github.com/nicolay-r/bulk-chain'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📘: ', 'raw': '📘: '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/bulk-chain/issues/26', 'raw': 'https://github.com/nicolay-r/bulk-chain/issues/26'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The key updates of the most recent release are:', 'raw': 'The key updates of the most recent release are:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 🪶 No-string (empty dependencies): you can use any framework / API for LLM.', 'raw': '✅ 🪶 No-string (empty dependencies): you can use any framework / API for LLM.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 🐍 Python API support (see first screenshot 📸).', 'raw': '✅ 🐍 Python API support (see first screenshot 📸).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 💥 Native try-catch wrapping to guarantee no-data-lost on using remote providers especially: OpenAI, ReplicateIO, OpenRouter, etc.', 'raw': '✅ 💥 Native try-catch wrapping to guarantee no-data-lost on using remote providers especially: OpenAI, ReplicateIO, OpenRouter, etc.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 🔥 Batching mode support: you may wrap for handling batches to significantly boost the performance 🚀 (see screenshot below for batch enabling 📸)', 'raw': '✅ 🔥 Batching mode support: you may wrap for handling batches to significantly boost the performance 🚀 (see screenshot below for batch enabling 📸)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 🔧 Fixed a lot of minor bugs', 'raw': '✅ 🔧 Fixed a lot of minor bugs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Quick start on GoogleColab:', 'raw': 'Quick start on GoogleColab:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📙: ', 'raw': '📙: '}, {'type': 'link', 'href': 'https://colab.research.google.com/github/nicolay-r/bulk-chain/blob/master/bulk_chain_tutorial.ipynb', 'raw': 'https://colab.research.google.com/github/nicolay-r/bulk-chain/blob/master/bulk_chain_tutorial.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📘 The wiki of the project is available here:', 'raw': '📘 The wiki of the project is available here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/nicolay-r/bulk-chain/wiki/Project-Documentation', 'raw': 'https://github.com/nicolay-r/bulk-chain/wiki/Project-Documentation'}, {'type': 'new_line', 'raw': '\n'}]"," 📢 If you're aimed at quick experiment with LLM and known Chain-of-Thought (CoT) / prompt schema with no-string dependencies, then I have something relevant for you to share 💎 + +So far I released the updated version 📦 bulk-chain-0.25.0 📦, which is aimed at bringing accessible API for an instant LLM application towards massive data iterators using via predefined prompt schema 🎊 + +📦: https://pypi.org/project/bulk-chain/0.25.0/ +🌟: https://github.com/nicolay-r/bulk-chain +📘: https://github.com/nicolay-r/bulk-chain/issues/26 + +The key updates of the most recent release are: +✅ 🪶 No-string (empty dependencies): you can use any framework / API for LLM. 
+✅ 🐍 Python API support (see first screenshot 📸). +✅ 💥 Native try-catch wrapping to guarantee no-data-lost on using remote providers especially: OpenAI, ReplicateIO, OpenRouter, etc. +✅ 🔥 Batching mode support: you may wrap for handling batches to significantly boost the performance 🚀 (see screenshot below for batch enabling 📸) +✅ 🔧 Fixed a lot of minor bugs + +Quick start on GoogleColab: +📙: https://colab.research.google.com/github/nicolay-r/bulk-chain/blob/master/bulk_chain_tutorial.ipynb + +📘 The wiki of the project is available here: +https://github.com/nicolay-r/bulk-chain/wiki/Project-Documentation +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/DZamYPKl0WhFxLKRnnNjO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/tVI_FOA80G1TA5Epi8ifw.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/GFX7nJsw9e-Eo6ByzqYop.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/zoLANshpbqaFHE6abtvwj.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/yOZAbAqvYdlVHeIS-X6aI.png'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'reonyy', 'awkzardxxx', 'AtAndDev', 'haider482'], 'count': 5}, {'reaction': '❤️', 'users': ['reonyy', 'AtAndDev'], 'count': 2}]",2024-12-25 13:13:48,2024-12-25 13:15:40.833,[],/posts/nicolay-r/188121237623202,2103,"{'language': 'en', 'probability': 0.7296765446662903}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nMuuypcbWFQcDD5xNo4Zt.jpeg,89.0,Şuayp Talha Kocabay,suayptalha,178409152704652,"[{'type': 'text', 'value': '🚀 Introducing Substitution Cipher Solvers!', 'raw': '🚀 Introducing Substitution Cipher Solvers!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As ', 'raw': 'As '}, {'type': 'mention', 'user': 'suayptalha', 'raw': '@suayptalha'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'Synd209', 'raw': '@Synd209'}, {'type': 'text', 'value': ' we are thrilled to share an update!', 'raw': ' we are thrilled to share an update!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔑 This project contains a text-to-text model designed to decrypt English and Turkish text encoded using a substitution cipher. In a substitution cipher, each letter in the plaintext is replaced by a corresponding, unique letter to form the ciphertext. The model leverages statistical and linguistic properties of English to make educated guesses about the letter substitutions, aiming to recover the original plaintext message.', 'raw': '🔑 This project contains a text-to-text model designed to decrypt English and Turkish text encoded using a substitution cipher. In a substitution cipher, each letter in the plaintext is replaced by a corresponding, unique letter to form the ciphertext. The model leverages statistical and linguistic properties of English to make educated guesses about the letter substitutions, aiming to recover the original plaintext message.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'These models were fine-tuned on T5-base. 
The models are for monoalphabetic English and Turkish substitution ciphers, and they output decoded text and the alphabet with an accuracy that has never been achieved before!', 'raw': 'These models were fine-tuned on T5-base. The models are for monoalphabetic English and Turkish substitution ciphers, and they output decoded text and the alphabet with an accuracy that has never been achieved before!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Example:', 'raw': 'Example:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Encoded text: Z hztwgx tstcsf qf z ulooqfe osfuqb tzx uezx awej z ozewsbe vlfwby fsmqisfx.', 'raw': 'Encoded text: Z hztwgx tstcsf qf z ulooqfe osfuqb tzx uezx awej z ozewsbe vlfwby fsmqisfx.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Decoded text: A family member or a support person may stay with a patient during recovery.', 'raw': 'Decoded text: A family member or a support person may stay with a patient during recovery.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model Collection Link: ', 'raw': 'Model Collection Link: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Cipher-AI/substitution-cipher-solvers-6731ebd22f0f0d8e0e2e2e00'}, 'url': 'https://huggingface.co/collections/Cipher-AI/substitution-cipher-solvers-6731ebd22f0f0d8e0e2e2e00', 'raw': 'https://huggingface.co/collections/Cipher-AI/substitution-cipher-solvers-6731ebd22f0f0d8e0e2e2e00'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Organization Link: ', 'raw': 'Organization Link: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'Cipher-AI'}, 'url': 'https://huggingface.co/Cipher-AI', 'raw': 'https://huggingface.co/Cipher-AI', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/668c299c95a7493f14fe3bcc/v5LshHGx42melDi7EfkBw.png'}]","🚀 Introducing Substitution Cipher Solvers! + +As @suayptalha and @Synd209 we are thrilled to share an update! + +🔑 This project contains a text-to-text model designed to decrypt English and Turkish text encoded using a substitution cipher. In a substitution cipher, each letter in the plaintext is replaced by a corresponding, unique letter to form the ciphertext. The model leverages statistical and linguistic properties of English to make educated guesses about the letter substitutions, aiming to recover the original plaintext message. + +These models were fine-tuned on T5-base. The models are for monoalphabetic English and Turkish substitution ciphers, and they output decoded text and the alphabet with an accuracy that has never been achieved before! + +Example: + +Encoded text: Z hztwgx tstcsf qf z ulooqfe osfuqb tzx uezx awej z ozewsbe vlfwby fsmqisfx. + +Decoded text: A family member or a support person may stay with a patient during recovery. 
+ +Model Collection Link: https://huggingface.co/collections/Cipher-AI/substitution-cipher-solvers-6731ebd22f0f0d8e0e2e2e00 + +Organization Link: https://huggingface.co/Cipher-AI","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/668c299c95a7493f14fe3bcc/6kdiLPmDfT4NL8dYPDn6w.png'}]","[{'_id': '668c299c95a7493f14fe3bcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nMuuypcbWFQcDD5xNo4Zt.jpeg', 'fullname': 'Şuayp Talha Kocabay', 'name': 'suayptalha', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 89}, {'_id': '671e40976938ffad9b16feef', 'avatarUrl': '/avatars/b74ca05105d51ad2b695b8e8d6ddaf7f.svg', 'fullname': 'Kutay Demirbaş', 'name': 'Synd209', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}]","[{'reaction': '🔥', 'users': ['Batuffolo753', 'John6666', 'nicolay-r', 'suayptalha', 'logame07', 'reonyy', 'AtAndDev'], 'count': 7}, {'reaction': '❤️', 'users': ['Batuffolo753', 'reonyy', 'AtAndDev'], 'count': 3}]",2024-12-25 08:23:27,2025-01-05 17:18:23.871,"[{'_id': '664c783cd23bbae0d6611395', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/664c783cd23bbae0d6611395/wEtzA5vgVwa7udMKE6q4f.png', 'fullname': 'Pierre', 'name': 'Ongalalli', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '668c299c95a7493f14fe3bcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nMuuypcbWFQcDD5xNo4Zt.jpeg', 'fullname': 'Şuayp Talha Kocabay', 'name': 'suayptalha', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 89, 'isFollowing': False}]",/posts/suayptalha/178409152704652,2513,"{'language': 'en', 'probability': 0.8893490433692932}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,705288275447563,"[{'type': 'text', 'value': 'Best open source Image to Video CogVideoX1.5-5B-I2V is pretty decent and optimized for low VRAM machines with high resolution - native resolution is 1360px and up to 10 seconds 161 frames - audios generated with new open source audio model', 'raw': 'Best open source Image to Video CogVideoX1.5-5B-I2V is pretty decent and optimized for low VRAM machines with high resolution - native resolution is 1360px and up to 10 seconds 161 frames - audios generated with new open source audio model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full YouTube tutorial for CogVideoX1.5-5B-I2V : ', 'raw': 'Full YouTube tutorial for CogVideoX1.5-5B-I2V : '}, {'type': 'link', 'href': 'https://youtu.be/5UCkMzP2VLE', 'raw': 'https://youtu.be/5UCkMzP2VLE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1-Click Windows, RunPod and Massed Compute installers : ', 'raw': '1-Click Windows, RunPod and Massed Compute installers : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/112848192', 'raw': 'https://www.patreon.com/posts/112848192'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.patreon.com/posts/112848192', 'raw': 'https://www.patreon.com/posts/112848192'}, {'type': 'text', 'value': ' - installs into Python 3.11 VENV', 'raw': ' - installs into Python 3.11 VENV'}, 
{'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Official Hugging Face repo of CogVideoX1.5-5B-I2V : ', 'raw': 'Official Hugging Face repo of CogVideoX1.5-5B-I2V : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'THUDM/CogVideoX1.5-5B-I2V'}, 'url': 'https://huggingface.co/THUDM/CogVideoX1.5-5B-I2V', 'raw': 'https://huggingface.co/THUDM/CogVideoX1.5-5B-I2V'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Official github repo : ', 'raw': 'Official github repo : '}, {'type': 'link', 'href': 'https://github.com/THUDM/CogVideo', 'raw': 'https://github.com/THUDM/CogVideo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Used prompts to generate videos txt file : ', 'raw': 'Used prompts to generate videos txt file : '}, {'type': 'link', 'href': 'https://gist.github.com/FurkanGozukara/471db7b987ab8d9877790358c126ac05', 'raw': 'https://gist.github.com/FurkanGozukara/471db7b987ab8d9877790358c126ac05'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo images shared in : ', 'raw': 'Demo images shared in : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/112848192', 'raw': 'https://www.patreon.com/posts/112848192'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I used 1360x768px images at 16 FPS and 81 frames = 5 seconds', 'raw': 'I used 1360x768px images at 16 FPS and 81 frames = 5 seconds'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '+1 frame coming from initial image', 'raw': '+1 frame coming from initial image'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also I have enabled all the optimizations shared on Hugging Face', 'raw': 'Also I have enabled all the optimizations shared on Hugging Face'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'pipe.enable_sequential_cpu_offload()', 'raw': 'pipe.enable_sequential_cpu_offload()'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'pipe.vae.enable_slicing()', 'raw': 'pipe.vae.enable_slicing()'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'pipe.vae.enable_tiling()', 'raw': 'pipe.vae.enable_tiling()'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'quantization = int8_weight_only - you need TorchAO and DeepSpeed works great on Windows with Python 3.11 VENV', 'raw': 'quantization = int8_weight_only - you need TorchAO and DeepSpeed works great on Windows with Python 3.11 VENV'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Used audio model : ', 'raw': 'Used audio model : '}, {'type': 'link', 'href': 'https://github.com/hkchengrex/MMAudio', 'raw': 'https://github.com/hkchengrex/MMAudio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1-Click Windows, RunPod and Massed Compute Installers for MMAudio : ', 'raw': '1-Click Windows, RunPod and Massed Compute Installers for MMAudio : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/117990364', 'raw': 'https://www.patreon.com/posts/117990364'}, {'type': 'new_line', 'raw': '\n'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.patreon.com/posts/117990364', 'raw': 'https://www.patreon.com/posts/117990364'}, {'type': 'text', 'value': ' - Installs into Python 3.10 VENV', 'raw': ' - Installs into Python 3.10 VENV'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Used very simple prompts - it fails when there is human in input video so use text to audio in such cases', 'raw': 'Used very simple prompts - it fails when there is human in input video so use text to audio in such cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I also tested some VRAM usages for CogVideoX1.5-5B-I2V', 'raw': 'I also tested some VRAM usages for CogVideoX1.5-5B-I2V'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Resolutions and here their VRAM requirements - may work on lower VRAM GPUs too but slower', 'raw': 'Resolutions and here their VRAM requirements - may work on lower VRAM GPUs too but slower'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '512x288 - 41 frames : 7700 MB , 576x320 - 41 frames : 7900 MB', 'raw': '512x288 - 41 frames : 7700 MB , 576x320 - 41 frames : 7900 MB'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '576x320 - 81 frames : 8850 MB , 704x384 - 81 frames : 8950 MB', 'raw': '576x320 - 81 frames : 8850 MB , 704x384 - 81 frames : 8950 MB'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '768x432 - 81 frames : 10600 MB , 896x496 - 81 frames : 12050 MB', 'raw': '768x432 - 81 frames : 10600 MB , 896x496 - 81 frames : 12050 MB'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '896x496 - 81 frames : 12050 MB , 960x528 - 81 frames : 12850 MB', 'raw': '896x496 - 81 frames : 12050 MB , 960x528 - 81 frames : 12850 MB'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Best open source Image to Video CogVideoX1.5-5B-I2V is pretty decent and optimized for low VRAM machines with high resolution - native resolution is 1360px and up to 10 seconds 161 frames - audios generated with new open source audio model + +Full YouTube tutorial for CogVideoX1.5-5B-I2V : https://youtu.be/5UCkMzP2VLE + +1-Click Windows, RunPod and Massed Compute installers : https://www.patreon.com/posts/112848192 + +https://www.patreon.com/posts/112848192 - installs into Python 3.11 VENV + +Official Hugging Face repo of CogVideoX1.5-5B-I2V : https://huggingface.co/THUDM/CogVideoX1.5-5B-I2V + +Official github repo : https://github.com/THUDM/CogVideo + +Used prompts to generate videos txt file : https://gist.github.com/FurkanGozukara/471db7b987ab8d9877790358c126ac05 + +Demo images shared in : https://www.patreon.com/posts/112848192 + +I used 1360x768px images at 16 FPS and 81 frames = 5 seconds + ++1 frame coming from initial image + +Also I have enabled all the optimizations shared on Hugging Face + +pipe.enable_sequential_cpu_offload() + +pipe.vae.enable_slicing() + +pipe.vae.enable_tiling() + +quantization = int8_weight_only - you need TorchAO and DeepSpeed works great on Windows with Python 3.11 VENV + +Used audio model : https://github.com/hkchengrex/MMAudio + +1-Click Windows, RunPod and Massed Compute 
Installers for MMAudio : https://www.patreon.com/posts/117990364 + +https://www.patreon.com/posts/117990364 - Installs into Python 3.10 VENV + +Used very simple prompts - it fails when there is human in input video so use text to audio in such cases + +I also tested some VRAM usages for CogVideoX1.5-5B-I2V + +Resolutions and here their VRAM requirements - may work on lower VRAM GPUs too but slower + +512x288 - 41 frames : 7700 MB , 576x320 - 41 frames : 7900 MB + +576x320 - 81 frames : 8850 MB , 704x384 - 81 frames : 8950 MB + +768x432 - 81 frames : 10600 MB , 896x496 - 81 frames : 12050 MB + +896x496 - 81 frames : 12050 MB , 960x528 - 81 frames : 12850 MB + + + + +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/7FWSMGRP5jRtAYiJQ9XwH.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/qUjNJVh2Dv6tgzc_xwzIX.png'}]",[],"[{'reaction': '👍', 'users': ['MonsterMMORPG', 'John6666', 'NeroBlackstone', 'littleFishCat', 'async0x42', 'nicolay-r', 'iky1e', 'Grundrak'], 'count': 8}, {'reaction': '🔥', 'users': ['MonsterMMORPG', 'iky1e', 'adrisinaga', 'haider482', 'Grundrak'], 'count': 5}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'iky1e', 'Aghiagte', 'haider482'], 'count': 4}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'muratakardev'], 'count': 2}, {'reaction': '🧠', 'users': ['MonsterMMORPG', 'Grundrak'], 'count': 2}, {'reaction': '👀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2024-12-25 01:06:34,2024-12-25 08:03:35.867,"[{'_id': '676bbb3b1e0d2bf464043371', 'avatarUrl': '/avatars/ededed955cd4589e62536905b36636d4.svg', 'fullname': 'nick', 'name': 'nick76876', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/MonsterMMORPG/705288275447563,2943,"{'language': 'en', 'probability': 0.6438827514648438}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg,1593.0,Clelia Astra Bertelli,as-cle-bert,427368037724361,"[{'type': 'text', 'value': 'Hi HuggingFacers!🤶🏼', 'raw': 'Hi HuggingFacers!🤶🏼'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""As my last 2024 project, I've dropped a Discord Bot that knows a lot about Pokemons🦋"", 'raw': ""As my last 2024 project, I've dropped a Discord Bot that knows a lot about Pokemons🦋""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub 👉 ', 'raw': 'GitHub 👉 '}, {'type': 'link', 'href': 'https://github.com/AstraBert/Pokemon-Bot', 'raw': 'https://github.com/AstraBert/Pokemon-Bot'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo Space 👉 ', 'raw': 'Demo Space 👉 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'as-cle-bert/pokemon-bot'}, 'url': 'https://huggingface.co/spaces/as-cle-bert/pokemon-bot', 'raw': 'https://huggingface.co/spaces/as-cle-bert/pokemon-bot'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The bot integrates:', 'raw': 'The bot integrates:'}, {'type': 'new_line', 
'raw': '\n'}, {'type': 'text', 'value': ""- Chat features (Cohere's Command-R) with RAG functionalities (hybrid search and reranking with Qdrant) and chat memory (managed through PostgreSQL) to produce information about Pokemons"", 'raw': ""- Chat features (Cohere's Command-R) with RAG functionalities (hybrid search and reranking with Qdrant) and chat memory (managed through PostgreSQL) to produce information about Pokemons""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Image-based search to identify Pokemons from their images (via Qdrant)', 'raw': '- Image-based search to identify Pokemons from their images (via Qdrant)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Card package random extraction and description', 'raw': '- Card package random extraction and description'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'HuggingFace🤗, as usual, plays the most important role in the application stack, with the following models:', 'raw': 'HuggingFace🤗, as usual, plays the most important role in the application stack, with the following models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'sentence-transformers/LaBSE'}, 'url': 'https://huggingface.co/sentence-transformers/LaBSE', 'raw': 'https://huggingface.co/sentence-transformers/LaBSE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivida/Splade_PP_en_v1'}, 'url': 'https://huggingface.co/prithivida/Splade_PP_en_v1', 'raw': 'https://huggingface.co/prithivida/Splade_PP_en_v1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'facebook/dinov2-large'}, 'url': 'https://huggingface.co/facebook/dinov2-large', 'raw': 'https://huggingface.co/facebook/dinov2-large'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And datasets:', 'raw': 'And datasets:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Karbo31881/Pokemon_images'}, 'url': 'https://huggingface.co/datasets/Karbo31881/Pokemon_images', 'raw': 'https://huggingface.co/datasets/Karbo31881/Pokemon_images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'wanghaofan/pokemon-wiki-captions'}, 'url': 'https://huggingface.co/datasets/wanghaofan/pokemon-wiki-captions', 'raw': 'https://huggingface.co/datasets/wanghaofan/pokemon-wiki-captions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'TheFusion21/PokemonCards'}, 'url': 'https://huggingface.co/datasets/TheFusion21/PokemonCards', 'raw': 'https://huggingface.co/datasets/TheFusion21/PokemonCards'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Have fun!🍕', 'raw': 'Have fun!🍕'}]","Hi HuggingFacers!🤶🏼 + +As my last 2024 project, I've dropped 
a Discord Bot that knows a lot about Pokemons🦋 + +GitHub 👉 https://github.com/AstraBert/Pokemon-Bot +Demo Space 👉 https://huggingface.co/spaces/as-cle-bert/pokemon-bot + +The bot integrates: +- Chat features (Cohere's Command-R) with RAG functionalities (hybrid search and reranking with Qdrant) and chat memory (managed through PostgreSQL) to produce information about Pokemons +- Image-based search to identify Pokemons from their images (via Qdrant) +- Card package random extraction and description + +HuggingFace🤗, as usual, plays the most important role in the application stack, with the following models: + +- https://huggingface.co/sentence-transformers/LaBSE +- https://huggingface.co/prithivida/Splade_PP_en_v1 +- https://huggingface.co/facebook/dinov2-large + +And datasets: + +- https://huggingface.co/datasets/Karbo31881/Pokemon_images +- https://huggingface.co/datasets/wanghaofan/pokemon-wiki-captions +- https://huggingface.co/datasets/TheFusion21/PokemonCards + +Have fun!🍕",[],[],"[{'reaction': '❤️', 'users': ['John6666', 'nicolay-r', 'AtAndDev', 'reonyy'], 'count': 4}]",2024-12-24 20:22:40,2024-12-24 20:22:40.127,[],/posts/as-cle-bert/427368037724361,1729,"{'language': 'en', 'probability': 0.7641462683677673}",0 +/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg,46.0,Emin Temiz,etemiz,217841936223171,"[{'type': 'text', 'value': 'A model that does well in math, reasoning, science and other benchmarks may not do well in wisdom domain. ', 'raw': 'A model that does well in math, reasoning, science and other benchmarks may not do well in wisdom domain. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'There are not many models that are focusing on wisdom it seems. It is going to be a problem. Smartness does not equal human alignment.', 'raw': 'There are not many models that are focusing on wisdom it seems. It is going to be a problem. Smartness does not equal human alignment.'}]","A model that does well in math, reasoning, science and other benchmarks may not do well in wisdom domain. + +There are not many models that are focusing on wisdom it seems. It is going to be a problem. Smartness does not equal human alignment.",[],[],[],2024-12-24 17:41:28,2024-12-26 09:54:41.544,[],/posts/etemiz/217841936223171,577,"{'language': 'en', 'probability': 0.9797914028167725}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,122561955245807,"[{'type': 'text', 'value': 'QwQ can see 🔥', 'raw': 'QwQ can see 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Qwen team released QvQ, a large vision LM with reasoning 😱', 'raw': 'Qwen team released QvQ, a large vision LM with reasoning 😱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'it outperforms proprietary VLMs on several benchmarks, comes with open weights and a demo! ', 'raw': 'it outperforms proprietary VLMs on several benchmarks, comes with open weights and a demo! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check them out ⬇️', 'raw': 'Check them out ⬇️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo ', 'raw': 'Demo '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/Qwen/QVQ-72B-preview', 'raw': 'https://huggingface.co/spaces/Qwen/QVQ-72B-preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model ', 'raw': 'Model '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Qwen/QVQ-72B-Preview'}, 'url': 'https://huggingface.co/Qwen/QVQ-72B-Preview', 'raw': 'https://huggingface.co/Qwen/QVQ-72B-Preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read more ', 'raw': 'Read more '}, {'type': 'link', 'href': 'https://qwenlm.github.io/blog/qvq-72b-preview/', 'raw': 'https://qwenlm.github.io/blog/qvq-72b-preview/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Congratulations ', 'raw': 'Congratulations '}, {'type': 'mention', 'user': 'JustinLin610', 'raw': '@JustinLin610'}, {'type': 'text', 'value': ' and team! ', 'raw': ' and team! '}]","QwQ can see 🔥 +Qwen team released QvQ, a large vision LM with reasoning 😱 + +it outperforms proprietary VLMs on several benchmarks, comes with open weights and a demo! +Check them out ⬇️ +Demo https://huggingface.co/spaces/Qwen/QVQ-72B-preview +Model https://huggingface.co/Qwen/QVQ-72B-Preview +Read more https://qwenlm.github.io/blog/qvq-72b-preview/ +Congratulations @JustinLin610 and team! ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/Be87fn2K7LBMKEP3t5si7.jpeg'}]","[{'_id': '620760a26e3b7210c2ff1943', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/620760a26e3b7210c2ff1943/VC-rKqimF6yxGESNVlPoR.jpeg', 'fullname': 'Junyang Lin', 'name': 'JustinLin610', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 242}]","[{'reaction': '👍', 'users': ['dillfrescott', 'John6666', 'hyhung12', 'chriswritescode', 'Kaoeiri', 'nicolay-r', 'emredeveloper', 'Galileo', 'Nielly', 'AtAndDev', 'victor', 'Seyfelislem'], 'count': 12}, {'reaction': '👀', 'users': ['etemiz', 'YaTharThShaRma999', 'dillfrescott', 'zzlxv', 'nicolay-r', 'Nielly', 'AtAndDev', 'nofl'], 'count': 8}, {'reaction': '🔥', 'users': ['prithivMLmods', 'chriswritescode', 'John6666', 'Galileo', 'Nielly', 'AtAndDev'], 'count': 6}, {'reaction': '🚀', 'users': ['John6666', 'aguvener', 'Nielly', 'AtAndDev'], 'count': 4}]",2024-12-24 17:40:59,2024-12-27 17:12:13.366,"[{'_id': '676534357fa1ef387fa732a3', 'avatarUrl': '/avatars/8f4a848a238338a5c38739cfd6b2310c.svg', 'fullname': 'IDM Crack 6.42 Build 35 Patch + Serial Key Downloa', 'name': 'idmcrackdownload', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '6758a9850e3fff481964ca6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/EolfJfjW25hC4Bt_hCPq8.png', 'fullname': 'Jean Louis', 'name': 'JLouisBiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/merve/122561955245807,4779,"{'language': 'en', 'probability': 0.8591617941856384}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,656710195018725,"[{'type': 
'text', 'value': 'After 6 years, BERT, the workhorse of encoder models, finally gets a replacement: 𝗪𝗲𝗹𝗰𝗼𝗺𝗲 𝗠𝗼𝗱𝗲𝗿𝗻𝗕𝗘𝗥𝗧! 🤗', 'raw': 'After 6 years, BERT, the workhorse of encoder models, finally gets a replacement: 𝗪𝗲𝗹𝗰𝗼𝗺𝗲 𝗠𝗼𝗱𝗲𝗿𝗻𝗕𝗘𝗥𝗧! 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We talk a lot about ✨Generative AI✨, meaning ""Decoder version of the Transformers architecture"", but this is only one of the ways to build LLMs: encoder models, which turn a sentence into a vector, are maybe even more widely used in industry than generative models.', 'raw': 'We talk a lot about ✨Generative AI✨, meaning ""Decoder version of the Transformers architecture"", but this is only one of the ways to build LLMs: encoder models, which turn a sentence into a vector, are maybe even more widely used in industry than generative models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The workhorse for this category has been BERT since its release in 2018 (that's prehistory for LLMs)."", 'raw': ""The workhorse for this category has been BERT since its release in 2018 (that's prehistory for LLMs).""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's not a fancy 100B parameters supermodel (just a few hundred million), but it's an excellent workhorse, kind of a Honda Civic for LLMs."", 'raw': ""It's not a fancy 100B parameters supermodel (just a few hundred million), but it's an excellent workhorse, kind of a Honda Civic for LLMs.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Many applications use BERT-family models - the top models in this category cumulate millions of downloads on the Hub.', 'raw': 'Many applications use BERT-family models - the top models in this category cumulate millions of downloads on the Hub.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""➡️ Now a collaboration between Answer.AI and LightOn just introduced BERT's replacement: ModernBERT."", 'raw': ""➡️ Now a collaboration between Answer.AI and LightOn just introduced BERT's replacement: ModernBERT.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗧𝗟;𝗗𝗥:', 'raw': '𝗧𝗟;𝗗𝗥:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏛️ Architecture changes:', 'raw': '🏛️ Architecture changes:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⇒ First, standard modernizations:', 'raw': '⇒ First, standard modernizations:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Rotary positional embeddings (RoPE)', 'raw': '- Rotary positional embeddings (RoPE)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Replace GeLU with GeGLU,', 'raw': '- Replace GeLU with GeGLU,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Use Flash Attention 2 ', 'raw': '- Use Flash Attention 2 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ The team also introduced innovative techniques like alternating attention instead of full attention, and sequence packing to get rid of padding overhead.', 'raw': '✨ The team also introduced innovative techniques like alternating attention instead of full attention, and sequence packing to get rid of padding overhead.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥇 As a result, the 
model tops the game of encoder models:', 'raw': '🥇 As a result, the model tops the game of encoder models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It beats previous standard DeBERTaV3 for 1/5th the memory footprint, and runs 4x faster!', 'raw': 'It beats previous standard DeBERTaV3 for 1/5th the memory footprint, and runs 4x faster!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the blog post 👉 ', 'raw': 'Read the blog post 👉 '}, {'type': 'link', 'href': 'https://huggingface.co/blog/modernbert', 'raw': 'https://huggingface.co/blog/modernbert'}]","After 6 years, BERT, the workhorse of encoder models, finally gets a replacement: 𝗪𝗲𝗹𝗰𝗼𝗺𝗲 𝗠𝗼𝗱𝗲𝗿𝗻𝗕𝗘𝗥𝗧! 🤗 + +We talk a lot about ✨Generative AI✨, meaning ""Decoder version of the Transformers architecture"", but this is only one of the ways to build LLMs: encoder models, which turn a sentence into a vector, are maybe even more widely used in industry than generative models. + +The workhorse for this category has been BERT since its release in 2018 (that's prehistory for LLMs). + +It's not a fancy 100B parameters supermodel (just a few hundred million), but it's an excellent workhorse, kind of a Honda Civic for LLMs. + +Many applications use BERT-family models - the top models in this category cumulate millions of downloads on the Hub. + +➡️ Now a collaboration between Answer.AI and LightOn just introduced BERT's replacement: ModernBERT. + +𝗧𝗟;𝗗𝗥: +🏛️ Architecture changes: +⇒ First, standard modernizations: +- Rotary positional embeddings (RoPE) +- Replace GeLU with GeGLU, +- Use Flash Attention 2 +✨ The team also introduced innovative techniques like alternating attention instead of full attention, and sequence packing to get rid of padding overhead. + +🥇 As a result, the model tops the game of encoder models: +It beats previous standard DeBERTaV3 for 1/5th the memory footprint, and runs 4x faster! 
+ +Read the blog post 👉 https://huggingface.co/blog/modernbert","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/SrI_JG49g9n_4AP7JcVaK.png'}]",[],"[{'reaction': '🔥', 'users': ['tudorizer', 'victor', 'John6666', 'wilsoneaton', 'prithivMLmods', 'celinah', 'GoDjMike', 'comarproject', 'dillfrescott'], 'count': 9}, {'reaction': '👍', 'users': ['bouy1915', 'xargs01', 'comarproject', 'dillfrescott', 'linekin'], 'count': 5}]",2024-12-19 17:26:20,2024-12-21 11:14:34.706,"[{'_id': '6691250d76c0fa097fd048be', 'avatarUrl': '/avatars/5a398c9961a1f76e18203b3521f3b5da.svg', 'fullname': 'Corwin Black', 'name': 'Mescalamba', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/m-ric/656710195018725,2480,"{'language': 'en', 'probability': 0.883269190788269}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1613655355830-noauth.png,209.0,Anton Lozhkov,anton-l,518484149515717,"[{'type': 'text', 'value': 'Introducing 📐𝐅𝐢𝐧𝐞𝐌𝐚𝐭𝐡: the best public math pre-training dataset with 50B+ tokens!', 'raw': 'Introducing 📐𝐅𝐢𝐧𝐞𝐌𝐚𝐭𝐡: the best public math pre-training dataset with 50B+ tokens!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HuggingFaceTB/finemath'}, 'url': 'https://huggingface.co/datasets/HuggingFaceTB/finemath', 'raw': 'https://huggingface.co/datasets/HuggingFaceTB/finemath'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Math remains challenging for LLMs and by training on FineMath we see considerable gains over other math datasets, especially on GSM8K and MATH.', 'raw': 'Math remains challenging for LLMs and by training on FineMath we see considerable gains over other math datasets, especially on GSM8K and MATH.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We build the dataset by:', 'raw': 'We build the dataset by:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ carefully extracting math data from Common Crawl;', 'raw': '🛠️ carefully extracting math data from Common Crawl;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔎 iteratively filtering and recalling high quality math pages using a classifier trained on synthetic annotations to identify math reasoning and deduction. ', 'raw': '🔎 iteratively filtering and recalling high quality math pages using a classifier trained on synthetic annotations to identify math reasoning and deduction. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We conducted a series of ablations comparing the performance of Llama-3.2-3B-Base after continued pre-training on FineMath and observe notable gains compared to the baseline model and other public math datasets.', 'raw': 'We conducted a series of ablations comparing the performance of Llama-3.2-3B-Base after continued pre-training on FineMath and observe notable gains compared to the baseline model and other public math datasets.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We hope this helps advance the performance of LLMs on math and reasoning! 🚀 ', 'raw': 'We hope this helps advance the performance of LLMs on math and reasoning! 
🚀 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We’re also releasing all the ablation models as well as the evaluation code. ', 'raw': 'We’re also releasing all the ablation models as well as the evaluation code. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'HuggingFaceTB/finemath-6763fb8f71b6439b653482c2'}, 'url': 'https://huggingface.co/collections/HuggingFaceTB/finemath-6763fb8f71b6439b653482c2', 'raw': 'https://huggingface.co/collections/HuggingFaceTB/finemath-6763fb8f71b6439b653482c2'}]","Introducing 📐𝐅𝐢𝐧𝐞𝐌𝐚𝐭𝐡: the best public math pre-training dataset with 50B+ tokens! +https://huggingface.co/datasets/HuggingFaceTB/finemath + +Math remains challenging for LLMs and by training on FineMath we see considerable gains over other math datasets, especially on GSM8K and MATH. + +We build the dataset by: +🛠️ carefully extracting math data from Common Crawl; +🔎 iteratively filtering and recalling high quality math pages using a classifier trained on synthetic annotations to identify math reasoning and deduction. + +We conducted a series of ablations comparing the performance of Llama-3.2-3B-Base after continued pre-training on FineMath and observe notable gains compared to the baseline model and other public math datasets. + +We hope this helps advance the performance of LLMs on math and reasoning! 🚀 +We’re also releasing all the ablation models as well as the evaluation code. + +https://huggingface.co/collections/HuggingFaceTB/finemath-6763fb8f71b6439b653482c2",[],[],"[{'reaction': '🚀', 'users': ['prithivMLmods', 'loubnabnl', 'capybarist', 'John6666', 'wilsoneaton', 'lamaraguilar', 'AtAndDev', 'gabrielmbmb', 'eastbrick'], 'count': 9}, {'reaction': '🔥', 'users': ['eliebak', 'loubnabnl', 'davanstrien', 'anakin87', 'victor', 'AtAndDev', 'SultanR'], 'count': 7}]",2024-12-19 16:22:22,2024-12-19 16:22:59.984,[],/posts/anton-l/518484149515717,2997,"{'language': 'en', 'probability': 0.8831292390823364}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,365761007512975,"[{'type': 'text', 'value': 'Qwen2VL Models: Vision and Language Processing 🍉', 'raw': 'Qwen2VL Models: Vision and Language Processing 🍉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📍FT; [ Latex OCR, Math Parsing, Text Analogy OCRTest ]', 'raw': '📍FT; [ Latex OCR, Math Parsing, Text Analogy OCRTest ]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Colab Demo: ', 'raw': 'Colab Demo: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Qwen2-VL-OCR-2B-Instruct'}, 'url': 'https://huggingface.co/prithivMLmods/Qwen2-VL-OCR-2B-Instruct/blob/main/Demo/ocrtest_qwen.ipynb', 'raw': 'https://huggingface.co/prithivMLmods/Qwen2-VL-OCR-2B-Instruct/blob/main/Demo/ocrtest_qwen.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '❄️Demo : ', 'raw': '❄️Demo : '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/prithivMLmods/Qwen2-VL-2B', 'raw': 'https://huggingface.co/spaces/prithivMLmods/Qwen2-VL-2B'}, {'type': 'text', 'value': ' . The demo includes the Qwen2VL 2B Base Model. ', 'raw': ' . 
The demo includes the Qwen2VL 2B Base Model. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯The space handles documenting content from the input image along with standardized plain text. It includes adjustment tools with over 30 font styles, file formatting support for PDF and DOCX, textual alignments, font size adjustments, and line spacing modifications. ', 'raw': '🎯The space handles documenting content from the input image along with standardized plain text. It includes adjustment tools with over 30 font styles, file formatting support for PDF and DOCX, textual alignments, font size adjustments, and line spacing modifications. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄PDFs are rendered using the ReportLab software library toolkit. ', 'raw': '📄PDFs are rendered using the ReportLab software library toolkit. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧵Models : ', 'raw': '🧵Models : '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '+ ', 'raw': '+ '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Qwen2-VL-OCR-2B-Instruct'}, 'url': 'https://huggingface.co/prithivMLmods/Qwen2-VL-OCR-2B-Instruct', 'raw': 'https://huggingface.co/prithivMLmods/Qwen2-VL-OCR-2B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '+ ', 'raw': '+ '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Qwen2-VL-Ocrtest-2B-Instruct'}, 'url': 'https://huggingface.co/prithivMLmods/Qwen2-VL-Ocrtest-2B-Instruct', 'raw': 'https://huggingface.co/prithivMLmods/Qwen2-VL-Ocrtest-2B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '+ ', 'raw': '+ '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Qwen2-VL-Math-Prase-2B-Instruct'}, 'url': 'https://huggingface.co/prithivMLmods/Qwen2-VL-Math-Prase-2B-Instruct', 'raw': 'https://huggingface.co/prithivMLmods/Qwen2-VL-Math-Prase-2B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀Sample Document :', 'raw': '🚀Sample Document :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '+ ', 'raw': '+ '}, {'type': 'link', 'href': 'https://drive.google.com/file/d/1Hfqqzq4Xc-3eTjbz-jcQY84V5E1YM71E/view?usp=sharing', 'raw': 'https://drive.google.com/file/d/1Hfqqzq4Xc-3eTjbz-jcQY84V5E1YM71E/view?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📦Collection : ', 'raw': '📦Collection : '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '+ ', 'raw': '+ '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/vision-language-models-67639f790e806e1f9799979f'}, 'url': 'https://huggingface.co/collections/prithivMLmods/vision-language-models-67639f790e806e1f9799979f', 'raw': 'https://huggingface.co/collections/prithivMLmods/vision-language-models-67639f790e806e1f9799979f'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 
'prithivMLmods', 'raw': '@prithivMLmods'}, {'type': 'text', 'value': ' 🤗', 'raw': ' 🤗'}, {'type': 'new_line', 'raw': '\n'}]","Qwen2VL Models: Vision and Language Processing 🍉 + +📍FT; [ Latex OCR, Math Parsing, Text Analogy OCRTest ] + +Colab Demo: https://huggingface.co/prithivMLmods/Qwen2-VL-OCR-2B-Instruct/blob/main/Demo/ocrtest_qwen.ipynb + +❄️Demo : https://huggingface.co/spaces/prithivMLmods/Qwen2-VL-2B . The demo includes the Qwen2VL 2B Base Model. + +🎯The space handles documenting content from the input image along with standardized plain text. It includes adjustment tools with over 30 font styles, file formatting support for PDF and DOCX, textual alignments, font size adjustments, and line spacing modifications. + +📄PDFs are rendered using the ReportLab software library toolkit. + +🧵Models : ++ https://huggingface.co/prithivMLmods/Qwen2-VL-OCR-2B-Instruct ++ https://huggingface.co/prithivMLmods/Qwen2-VL-Ocrtest-2B-Instruct ++ https://huggingface.co/prithivMLmods/Qwen2-VL-Math-Prase-2B-Instruct + +🚀Sample Document : ++ https://drive.google.com/file/d/1Hfqqzq4Xc-3eTjbz-jcQY84V5E1YM71E/view?usp=sharing + +📦Collection : ++ https://huggingface.co/collections/prithivMLmods/vision-language-models-67639f790e806e1f9799979f + +. +. +. +@prithivMLmods 🤗 +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/LMOHkrC9eGGLhXme-Y1KJ.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/mStrvqwPzjpfPsANJZoLk.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/jIXuH33cgNhkNwgNQO8PH.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/Jdx79O-As1i99TElCxbc-.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/ySc5AuD4SajQF4jQYIYMZ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/tenR2uaatWdDyNewffua8.png'}]","[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957}]","[{'reaction': '👀', 'users': ['bfletcher', 'ferisella', 'AdinaY'], 'count': 3}, {'reaction': '🤗', 'users': ['John6666', 'AdinaY'], 'count': 2}, {'reaction': '🔥', 'users': ['ai-everyday'], 'count': 1}]",2024-12-19 16:09:49,2025-01-06 08:30:44.457,"[{'_id': '6719812c7a504dc24677bcbc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/V5pyJQgBARCle7slMaSJz.jpeg', 'fullname': 'AMIA THIERRY STEPHANE', 'name': 'r4gamia', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/prithivMLmods/365761007512975,2590,"{'language': 'en', 'probability': 0.5381271243095398}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6727ae309b9d58f33eba419a/A-8lDQBqm8HVZzMvSrooS.jpeg,43.0,Franck Abgrall,FranckAbgrall,313711873819927,"[{'type': 'text', 'value': '🆕 It should now be easier to identify discussions or pull requests where repository owners are participating on HF, let us know if that helps 💬🤗', 'raw': '🆕 It should now be easier to identify 
discussions or pull requests where repository owners are participating on HF, let us know if that helps 💬🤗'}]","🆕 It should now be easier to identify discussions or pull requests where repository owners are participating on HF, let us know if that helps 💬🤗","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6727ae309b9d58f33eba419a/lmfaA8fhjWuAscqpIj-ps.mp4'}]",[],"[{'reaction': '🔥', 'users': ['victor', 'not-lain', 'Nymbo', 'John6666', 'bfletcher', 'celinah', 'Joseph717171'], 'count': 7}, {'reaction': '🤗', 'users': ['Joseph717171'], 'count': 1}]",2024-12-19 15:10:00,2024-12-21 10:30:27.870,"[{'_id': '651e93137b2a2e027f9e55df', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/651e93137b2a2e027f9e55df/5oXWJeEDCrMJLA4s_0I93.png', 'fullname': 'Aurélien-Morgan CLAUDON', 'name': 'Aurelien-Morgan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}]",/posts/FranckAbgrall/313711873819927,1602,"{'language': 'en', 'probability': 0.9052979946136475}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg,233.0,s3nh,s3nh,484472930917681,"[{'type': 'text', 'value': 'Welcome back, ', 'raw': 'Welcome back, '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Small Language Models Enthusiasts and GPU Poor oss enjoyers, let us connect. ', 'raw': 'Small Language Models Enthusiasts and GPU Poor oss enjoyers, let us connect. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just created an organization whose main target is to have fun with smaller models tuneable on consumer range GPUs, feel free to join and let us have some fun, much love ;3', 'raw': 'Just created an organization whose main target is to have fun with smaller models tuneable on consumer range GPUs, feel free to join and let us have some fun, much love ;3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'SmolTuners'}, 'url': 'https://huggingface.co/SmolTuners', 'raw': 'https://huggingface.co/SmolTuners', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/BL14S3AUXdWQjRjMBscpo.png'}]","Welcome back, + +Small Language Models Enthusiasts and GPU Poor oss enjoyers, let us connect. 
+Just created an organization whose main target is to have fun with smaller models tuneable on consumer range GPUs, feel free to join and let us have some fun, much love ;3 + +https://huggingface.co/SmolTuners",[],[],"[{'reaction': '❤️', 'users': ['John6666', 'JuniperChinenye', 'takarajordan', 'CaioXapelaum', 'AtAndDev', 'santhoshkammari', 'Fizzarolli', 'TheDrunkenSnail', 'Selefk', 'coderfpv', 'raushan-in', 'KnutJaegersberg'], 'count': 12}, {'reaction': '🤗', 'users': ['John6666', 'CaioXapelaum', 'AtAndDev', 'Felladrin', 's3nh'], 'count': 5}]",2024-12-19 13:35:36,2024-12-19 21:07:20.619,"[{'_id': '6613f7ae43c4456e13ecbdcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg', 'fullname': 'Jordan Legg', 'name': 'takarajordan', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 40, 'isFollowing': False}, {'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}, {'_id': '634262af8d8089ebaefd410e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/634262af8d8089ebaefd410e/pcnqe74uMV90K3HVuM76F.png', 'fullname': 'Fizz 🏳️\u200d⚧️', 'name': 'Fizzarolli', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 84, 'isFollowing': False}]",/posts/s3nh/484472930917681,2194,"{'language': 'en', 'probability': 0.92228102684021}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/62d648291fa3e4e7ae3fa6e8/oatOwf8Xqe5eDbCSuYqCd.png,3314.0,ben burtenshaw,burtenshaw,878651682523866,"[{'type': 'text', 'value': 'People are flexing their end of year stats, so I made this app to show hub stats in a tidy design!', 'raw': 'People are flexing their end of year stats, so I made this app to show hub stats in a tidy design!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks ', 'raw': 'Thanks '}, {'type': 'mention', 'user': 'Ameeeee', 'raw': '@Ameeeee'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'jfcalvo', 'raw': '@jfcalvo'}, {'type': 'text', 'value': ' for the feature from Argilla!', 'raw': ' for the feature from Argilla!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'burtenshaw/recap'}, 'url': 'https://huggingface.co/spaces/burtenshaw/recap', 'raw': 'https://huggingface.co/spaces/burtenshaw/recap'}, {'type': 'new_line', 'raw': '\n'}]","People are flexing their end of year stats, so I made this app to show hub stats in a tidy design! + +Thanks @Ameeeee and @jfcalvo for the feature from Argilla! 
+https://huggingface.co/spaces/burtenshaw/recap +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62d648291fa3e4e7ae3fa6e8/sqKS7C6OG6wnzcRG6bl3d.png'}]","[{'_id': '63e27f0f1f963b8f20f4a10d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63e27f0f1f963b8f20f4a10d/n9KcVAzZDfymP9j_jpTRc.jpeg', 'fullname': 'Ame Vi', 'name': 'Ameeeee', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 90}, {'_id': '647b8c5c9c71e4286021b265', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/647b8c5c9c71e4286021b265/YBQ-UgVsEa18FoieeXcaw.jpeg', 'fullname': 'José Francisco', 'name': 'jfcalvo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 30}]","[{'reaction': '❤️', 'users': ['s3nh', 'John6666', 'NeoPy', 'clem', 'julien-c', 'dvilasuero', 'gabrielmbmb', 'jeffboudier', 'BluecoderBlues', 'Nymbo', 'JudithR79', 'alvanlii'], 'count': 12}, {'reaction': '🤗', 'users': ['John6666', 'clem', 'julien-c', 'dvilasuero', 'FranckAbgrall', 'gabrielmbmb', 'jeffboudier', 'Nymbo', 'JudithR79'], 'count': 9}, {'reaction': '🚀', 'users': ['prithivMLmods'], 'count': 1}]",2024-12-19 13:20:49,2024-12-19 14:49:05.730,"[{'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}]",/posts/burtenshaw/878651682523866,3069,"{'language': 'en', 'probability': 0.8713436722755432}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,853337605317831,"[{'type': 'text', 'value': '𝐇𝐮𝐠𝐠𝐢𝐧𝐠 𝐅𝐚𝐜𝐞 𝐫𝐞𝐥𝐞𝐚𝐬𝐞𝐬 𝐏𝐢𝐜𝐨𝐭𝐫𝐨𝐧, 𝐚 𝐦𝐢𝐜𝐫𝐨𝐬𝐜𝐨𝐩𝐢𝐜 𝐥𝐢𝐛 𝐭𝐡𝐚𝐭 𝐬𝐨𝐥𝐯𝐞𝐬 𝐋𝐋𝐌 𝐭𝐫𝐚𝐢𝐧𝐢𝐧𝐠 𝟒𝐃 𝐩𝐚𝐫𝐚𝐥𝐥𝐞𝐥𝐢𝐳𝐚𝐭𝐢𝐨𝐧 🥳', 'raw': '𝐇𝐮𝐠𝐠𝐢𝐧𝐠 𝐅𝐚𝐜𝐞 𝐫𝐞𝐥𝐞𝐚𝐬𝐞𝐬 𝐏𝐢𝐜𝐨𝐭𝐫𝐨𝐧, 𝐚 𝐦𝐢𝐜𝐫𝐨𝐬𝐜𝐨𝐩𝐢𝐜 𝐥𝐢𝐛 𝐭𝐡𝐚𝐭 𝐬𝐨𝐥𝐯𝐞𝐬 𝐋𝐋𝐌 𝐭𝐫𝐚𝐢𝐧𝐢𝐧𝐠 𝟒𝐃 𝐩𝐚𝐫𝐚𝐥𝐥𝐞𝐥𝐢𝐳𝐚𝐭𝐢𝐨𝐧 🥳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🕰️ Llama-3.1-405B took 39 million GPU-hours to train, i.e. about 4.5 thousand years.', 'raw': '🕰️ Llama-3.1-405B took 39 million GPU-hours to train, i.e. 
about 4.5 thousand years.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👴🏻 If they had needed all this time, we would have GPU stories from the time of Pharaoh 𓂀: ""Alas, Lord of Two Lands, the shipment of counting-stones arriving from Cathay was lost to pirates, this shall delay the building of your computing temple by many moons ""', 'raw': '👴🏻 If they had needed all this time, we would have GPU stories from the time of Pharaoh 𓂀: ""Alas, Lord of Two Lands, the shipment of counting-stones arriving from Cathay was lost to pirates, this shall delay the building of your computing temple by many moons ""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ But instead, they just parallelized the training on 24k H100s, which made it take just a few months.', 'raw': '🛠️ But instead, they just parallelized the training on 24k H100s, which made it take just a few months.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This required parallelizing across 4 dimensions: data, tensor, context, pipeline.', 'raw': 'This required parallelizing across 4 dimensions: data, tensor, context, pipeline.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And it is infamously hard to do, making for bloated code repos that hold together only by magic.', 'raw': 'And it is infamously hard to do, making for bloated code repos that hold together only by magic.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🤏 𝗕𝘂𝘁 𝗻𝗼𝘄 𝘄𝗲 𝗱𝗼𝗻'𝘁 𝗻𝗲𝗲𝗱 𝗵𝘂𝗴𝗲 𝗿𝗲𝗽𝗼𝘀 𝗮𝗻𝘆𝗺𝗼𝗿𝗲! Instead of building mega-training codes, Hugging Face colleagues cooked in the other direction, towards tiny 4D parallelism libs. A team has built Nanotron, already widely used in industry."", 'raw': ""🤏 𝗕𝘂𝘁 𝗻𝗼𝘄 𝘄𝗲 𝗱𝗼𝗻'𝘁 𝗻𝗲𝗲𝗱 𝗵𝘂𝗴𝗲 𝗿𝗲𝗽𝗼𝘀 𝗮𝗻𝘆𝗺𝗼𝗿𝗲! Instead of building mega-training codes, Hugging Face colleagues cooked in the other direction, towards tiny 4D parallelism libs. A team has built Nanotron, already widely used in industry.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""And now a team releases Picotron, a radical approach to code 4D Parallelism in just a few hundred lines of code, a real engineering prowess, making it much easier to understand what's actually happening!"", 'raw': ""And now a team releases Picotron, a radical approach to code 4D Parallelism in just a few hundred lines of code, a real engineering prowess, making it much easier to understand what's actually happening!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""⚡ 𝗜𝘁'𝘀 𝘁𝗶𝗻𝘆, 𝘆𝗲𝘁 𝗽𝗼𝘄𝗲𝗿𝗳𝘂𝗹:"", 'raw': ""⚡ 𝗜𝘁'𝘀 𝘁𝗶𝗻𝘆, 𝘆𝗲𝘁 𝗽𝗼𝘄𝗲𝗿𝗳𝘂𝗹:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Counting in MFU (Model FLOPs Utilization, how much the model actually uses all the compute potential), this lib reaches ~50% on SmolLM-1.7B model with 8 H100 GPUs, which is really close to what huge libs would reach. (Caution: the team is leading further benchmarks to verify this)', 'raw': 'Counting in MFU (Model FLOPs Utilization, how much the model actually uses all the compute potential), this lib reaches ~50% on SmolLM-1.7B model with 8 H100 GPUs, which is really close to what huge libs would reach. 
(Caution: the team is leading further benchmarks to verify this)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Go take a look 👉 ', 'raw': 'Go take a look 👉 '}, {'type': 'link', 'href': 'https://github.com/huggingface/picotron/tree/main/picotron', 'raw': 'https://github.com/huggingface/picotron/tree/main/picotron'}]","𝐇𝐮𝐠𝐠𝐢𝐧𝐠 𝐅𝐚𝐜𝐞 𝐫𝐞𝐥𝐞𝐚𝐬𝐞𝐬 𝐏𝐢𝐜𝐨𝐭𝐫𝐨𝐧, 𝐚 𝐦𝐢𝐜𝐫𝐨𝐬𝐜𝐨𝐩𝐢𝐜 𝐥𝐢𝐛 𝐭𝐡𝐚𝐭 𝐬𝐨𝐥𝐯𝐞𝐬 𝐋𝐋𝐌 𝐭𝐫𝐚𝐢𝐧𝐢𝐧𝐠 𝟒𝐃 𝐩𝐚𝐫𝐚𝐥𝐥𝐞𝐥𝐢𝐳𝐚𝐭𝐢𝐨𝐧 🥳 + +🕰️ Llama-3.1-405B took 39 million GPU-hours to train, i.e. about 4.5 thousand years. + +👴🏻 If they had needed all this time, we would have GPU stories from the time of Pharaoh 𓂀: ""Alas, Lord of Two Lands, the shipment of counting-stones arriving from Cathay was lost to pirates, this shall delay the building of your computing temple by many moons "" + +🛠️ But instead, they just parallelized the training on 24k H100s, which made it take just a few months. +This required parallelizing across 4 dimensions: data, tensor, context, pipeline. +And it is infamously hard to do, making for bloated code repos that hold together only by magic. + +🤏 𝗕𝘂𝘁 𝗻𝗼𝘄 𝘄𝗲 𝗱𝗼𝗻'𝘁 𝗻𝗲𝗲𝗱 𝗵𝘂𝗴𝗲 𝗿𝗲𝗽𝗼𝘀 𝗮𝗻𝘆𝗺𝗼𝗿𝗲! Instead of building mega-training codes, Hugging Face colleagues cooked in the other direction, towards tiny 4D parallelism libs. A team has built Nanotron, already widely used in industry. +And now a team releases Picotron, a radical approach to code 4D Parallelism in just a few hundred lines of code, a real engineering prowess, making it much easier to understand what's actually happening! + +⚡ 𝗜𝘁'𝘀 𝘁𝗶𝗻𝘆, 𝘆𝗲𝘁 𝗽𝗼𝘄𝗲𝗿𝗳𝘂𝗹: +Counting in MFU (Model FLOPs Utilization, how much the model actually uses all the compute potential), this lib reaches ~50% on SmolLM-1.7B model with 8 H100 GPUs, which is really close to what huge libs would reach. 
(Caution: the team is leading further benchmarks to verify this) + +Go take a look 👉 https://github.com/huggingface/picotron/tree/main/picotron","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/G_ZN7mjwauj24CgqLFxiR.png'}]",[],"[{'reaction': '👍', 'users': ['Aurelien-Morgan', 'leeloolee', 'djuna', 's3nh', 'solankibhargav', 'Aviv-anthonnyolime', 'SujanKarki', 'carlizor', 'aifeifei798', 'Joseph717171', 'AtAndDev', 'joddy'], 'count': 12}, {'reaction': '👀', 'users': ['John6666', 'sequelbox', 'Joseph717171'], 'count': 3}, {'reaction': '🧠', 'users': ['prithivMLmods', 'carlizor', 'Joseph717171'], 'count': 3}]",2024-12-19 09:20:46,2024-12-19 12:29:08.220,"[{'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}]",/posts/m-ric/853337605317831,2620,"{'language': 'en', 'probability': 0.9283406734466553}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,383696834278825,"[{'type': 'text', 'value': '🐇 Tumble down the AI rabbit hole without any technical knowledge!', 'raw': '🐇 Tumble down the AI rabbit hole without any technical knowledge!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explore AI models on the Hub by a simple and quick search', 'raw': 'Explore AI models on the Hub by a simple and quick search'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'davidberenstein1957/transformers-pipeline-playground'}, 'url': 'https://huggingface.co/spaces/davidberenstein1957/transformers-pipeline-playground', 'raw': 'https://huggingface.co/spaces/davidberenstein1957/transformers-pipeline-playground'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","🐇 Tumble down the AI rabbit hole without any technical knowledge! + +Explore AI models on the Hub by a simple and quick search + +Demo: https://huggingface.co/spaces/davidberenstein1957/transformers-pipeline-playground + +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/634ff41ff32062e9eb7b06a3/TnG4hlpGibFgTlVgK9DZa.mp4'}]",[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'John6666', 's3nh', 'AtAndDev'], 'count': 4}]",2024-12-19 06:10:46,2024-12-19 06:10:46.619,[],/posts/davidberenstein1957/383696834278825,1375,"{'language': 'en', 'probability': 0.7031517028808594}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62f93abbc4817cfc0756b6f8/rGYLaq-rmoJJYotkC1VXk.jpeg,74.0,Anton Obukhov,toshas,584531088823321,"[{'type': 'text', 'value': 'Introducing ⇆ Marigold-DC — our training-free zero-shot approach to monocular Depth Completion with guided diffusion! If you have ever wondered how else a long denoising diffusion schedule can be useful, we have an answer for you! ', 'raw': 'Introducing ⇆ Marigold-DC — our training-free zero-shot approach to monocular Depth Completion with guided diffusion! If you have ever wondered how else a long denoising diffusion schedule can be useful, we have an answer for you! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Depth Completion addresses sparse, incomplete, or noisy measurements from photogrammetry or sensors like LiDAR. Sparse points aren’t just hard for humans to interpret — they also hinder downstream tasks.', 'raw': 'Depth Completion addresses sparse, incomplete, or noisy measurements from photogrammetry or sensors like LiDAR. Sparse points aren’t just hard for humans to interpret — they also hinder downstream tasks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Traditionally, depth completion was framed as image-guided depth interpolation. We leverage Marigold, a diffusion-based monodepth model, to reframe it as sparse-depth-guided depth generation. How the turntables! Check out the paper anyway 👇', 'raw': 'Traditionally, depth completion was framed as image-guided depth interpolation. We leverage Marigold, a diffusion-based monodepth model, to reframe it as sparse-depth-guided depth generation. How the turntables! Check out the paper anyway 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌎 Website: ', 'raw': '🌎 Website: '}, {'type': 'link', 'href': 'https://marigolddepthcompletion.github.io/', 'raw': 'https://marigolddepthcompletion.github.io/'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 Demo: ', 'raw': '🤗 Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prs-eth/marigold-dc'}, 'url': 'https://huggingface.co/spaces/prs-eth/marigold-dc', 'raw': 'https://huggingface.co/spaces/prs-eth/marigold-dc'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📕 Paper: ', 'raw': '📕 Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2412.13389', 'raw': 'https://arxiv.org/abs/2412.13389'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👾 Code: ', 'raw': '👾 Code: '}, {'type': 'link', 'href': 'https://github.com/prs-eth/marigold-dc', 'raw': 'https://github.com/prs-eth/marigold-dc'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Team ETH Zürich: Massimiliano Viola (', 'raw': 'Team ETH Zürich: Massimiliano Viola ('}, {'type': 'mention', 'user': 'mviola', 'raw': '@mviola'}, {'type': 'text', 'value': '), Kevin Qu (', 'raw': '), Kevin Qu ('}, {'type': 'mention', 'user': 'KevinQu7', 'raw': '@KevinQu7'}, {'type': 'text', 'value': '), Nando Metzger (', 'raw': '), Nando Metzger ('}, {'type': 'mention', 'user': 'nandometzger', 'raw': '@nandometzger'}, {'type': 'text', 'value': '), Bingxin Ke (', 'raw': '), Bingxin Ke ('}, {'type': 'mention', 'user': 'Bingxin', 'raw': '@Bingxin'}, {'type': 'text', 'value': '), Alexander Becker, Konrad Schindler, and Anton Obukhov (', 'raw': '), Alexander Becker, Konrad Schindler, and Anton Obukhov ('}, {'type': 'mention', 'user': 'toshas', 'raw': '@toshas'}, {'type': 'text', 'value': '). We thank ', 'raw': '). We thank '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hugging Face for their continuous support.', 'raw': 'Hugging Face for their continuous support.'}]","Introducing ⇆ Marigold-DC — our training-free zero-shot approach to monocular Depth Completion with guided diffusion! 
If you have ever wondered how else a long denoising diffusion schedule can be useful, we have an answer for you! + +Depth Completion addresses sparse, incomplete, or noisy measurements from photogrammetry or sensors like LiDAR. Sparse points aren’t just hard for humans to interpret — they also hinder downstream tasks. + +Traditionally, depth completion was framed as image-guided depth interpolation. We leverage Marigold, a diffusion-based monodepth model, to reframe it as sparse-depth-guided depth generation. How the turntables! Check out the paper anyway 👇 + +🌎 Website: https://marigolddepthcompletion.github.io/ +🤗 Demo: https://huggingface.co/spaces/prs-eth/marigold-dc +📕 Paper: https://arxiv.org/abs/2412.13389 +👾 Code: https://github.com/prs-eth/marigold-dc + +Team ETH Zürich: Massimiliano Viola (@mviola), Kevin Qu (@KevinQu7), Nando Metzger (@nandometzger), Bingxin Ke (@Bingxin), Alexander Becker, Konrad Schindler, and Anton Obukhov (@toshas). We thank +Hugging Face for their continuous support.","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f93abbc4817cfc0756b6f8/06No6Yu-rPH8dCLy-qqUr.mp4'}]","[{'_id': '63d90391da4f72339244c2a8', 'avatarUrl': '/avatars/eb0e0259c391d59739c1a205c36bb539.svg', 'fullname': 'Bingxin Ke', 'name': 'Bingxin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 11}, {'_id': '660ae64e20634eb93dfafe2b', 'avatarUrl': '/avatars/9099f45f05e65574e435fe16189da060.svg', 'fullname': 'Kevin Qu', 'name': 'KevinQu7', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}, {'_id': '6627c05e6a732431857c841b', 'avatarUrl': '/avatars/c6d11b0a760fce64f683cf9836f481e6.svg', 'fullname': 'Massimiliano Viola', 'name': 'mviola', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}, {'_id': '63a5785a8fb23d08bb2d0291', 'avatarUrl': '/avatars/758b06dae06e9eee6fced10ce682aef1.svg', 'fullname': 'Nando Metzger', 'name': 'nandometzger', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7}, {'_id': '62f93abbc4817cfc0756b6f8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62f93abbc4817cfc0756b6f8/rGYLaq-rmoJJYotkC1VXk.jpeg', 'fullname': 'Anton Obukhov', 'name': 'toshas', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 74}]","[{'reaction': '🔥', 'users': ['John6666', 'KevinQu7', 'blanchon'], 'count': 3}, {'reaction': '😎', 'users': ['mviola', 'KevinQu7', 'blanchon'], 'count': 3}, {'reaction': '🚀', 'users': ['KevinQu7'], 'count': 1}]",2024-12-19 01:59:18,2024-12-19 01:59:18.009,[],/posts/toshas/584531088823321,1357,"{'language': 'en', 'probability': 0.760409414768219}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,957583917976329,"[{'type': 'text', 'value': 'Can we please do something about this? It makes everything I do so much harder, and because my local machine is so terrible, I am forced to test in production. This makes debugging so difficult.', 'raw': 'Can we please do something about this? It makes everything I do so much harder, and because my local machine is so terrible, I am forced to test in production. 
This makes debugging so difficult.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'nroggendorff/system-exit'}, 'url': 'https://huggingface.co/spaces/nroggendorff/system-exit', 'raw': 'https://huggingface.co/spaces/nroggendorff/system-exit'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'cc ', 'raw': 'cc '}, {'type': 'mention', 'user': 'victor', 'raw': '@victor'}]","Can we please do something about this? It makes everything I do so much harder, and because my local machine is so terrible, I am forced to test in production. This makes debugging so difficult. +https://huggingface.co/spaces/nroggendorff/system-exit + +cc @victor",[],"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949}]","[{'reaction': '➕', 'users': ['John6666', 'pepper13', 'wilsoneaton', 'hdickerson', 'bfletcher'], 'count': 5}, {'reaction': '👀', 'users': ['victor'], 'count': 1}]",2024-12-19 01:14:30,2024-12-19 02:43:12.811,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/nroggendorff/957583917976329,1332,"{'language': 'en', 'probability': 0.900927722454071}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6593502ca2607099284523db/svxu-iTvwsgmiYNgaFGIN.png,44.0,William J. Marshall,fuzzy-mittenz,190131162208206,"[{'type': 'text', 'value': ""8pm est New Discussion on AI privatization and its importance for cooperative and confidential development, client services, and family use."", 'raw': ""8pm est New Discussion on AI privatization and its importance for cooperative and confidential development, client services, and family use.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We can also touch on the NEW OPEN SOURCE which will solve MANY of the current problems we face not only with AI but as a society.', 'raw': 'We can also touch on the NEW OPEN SOURCE which will solve MANY of the current problems we face not only with AI but as a society.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '8pm', 'raw': '8pm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(Sorry upon startup some guy hacked the chat or simply crashed it)', 'raw': '(Sorry upon startup some guy hacked the chat or simply crashed it)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'new link for 8pm est', 'raw': 'new link for 8pm est'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://x.com/i/spaces/1MnxnDQrkjYGO', 'raw': 'https://x.com/i/spaces/1MnxnDQrkjYGO'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","8pm est New Discussion on AI privatization and its importance for cooperative and confidential development, client services, and family use. + +We can also touch on the NEW OPEN SOURCE which will solve MANY of the current problems we face not only with AI but as a society. 
+8pm +(Sorry upon startup some guy hacked the chat or simply crashed it) +new link for 8pm est +https://x.com/i/spaces/1MnxnDQrkjYGO ",[],[],"[{'reaction': '👀', 'users': ['fuzzy-mittenz'], 'count': 1}]",2024-12-15 19:34:11,2024-12-16 00:51:25.125,"[{'_id': '6593502ca2607099284523db', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6593502ca2607099284523db/svxu-iTvwsgmiYNgaFGIN.png', 'fullname': 'William J. Marshall', 'name': 'fuzzy-mittenz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 44, 'isFollowing': False}]",/posts/fuzzy-mittenz/190131162208206,636,"{'language': 'en', 'probability': 0.9064681529998779}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg,69.0,Csaba Kecskemeti,csabakecskemeti,431590192984174,"[{'type': 'text', 'value': 'The AMD Instinct MI50 (~$110) is surprisingly fast for inference with quantized models. ', 'raw': 'The AMD Instinct MI50 (~$110) is surprisingly fast for inference with quantized models. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This runs a Llama 3.1 8B Q8 with Llama.cpp', 'raw': 'This runs a Llama 3.1 8B Q8 with Llama.cpp'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/DevQuasar/Mi50', 'raw': 'https://huggingface.co/spaces/DevQuasar/Mi50'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A little blogpost about the HW', 'raw': 'A little blogpost about the HW'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'http://devquasar.com/uncategorized/amd-radeon-instinct-mi50-cheap-inference/', 'raw': 'http://devquasar.com/uncategorized/amd-radeon-instinct-mi50-cheap-inference/'}]","The AMD Instinct MI50 (~$110) is surprisingly fast for inference with quantized models. + +This runs a Llama 3.1 8B Q8 with Llama.cpp +https://huggingface.co/spaces/DevQuasar/Mi50 + +A little blogpost about the HW +http://devquasar.com/uncategorized/amd-radeon-instinct-mi50-cheap-inference/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e6d37e02dee9bcb9d9fa18/FwN8i0efh8MDbQh8aTY84.jpeg'}]",[],"[{'reaction': '👍', 'users': ['DDUPS', 'Harbous', 'pixelcoder', 'spedrox-sac', 'John6666', 'reonyy', 'Clausss', 'nyuuzyou', 'ThatBlondeGuy', 'Eliaccess', 'logame07', 'spinhack', 'fuzzy-mittenz', 'wsuff', 'omaryshchenko', 'victor'], 'count': 16}, {'reaction': '🔥', 'users': ['fuzzy-mittenz'], 'count': 1}]",2024-12-15 04:49:03,2024-12-15 04:58:25.554,[],/posts/csabakecskemeti/431590192984174,4675,"{'language': 'en', 'probability': 0.6209259629249573}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,786375380898973,"[{'type': 'text', 'value': ""I hate it when I'm debugging a space and I run out of ZeroGPU quota."", 'raw': ""I hate it when I'm debugging a space and I run out of ZeroGPU quota.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Also, I'm totally not biased btw, but I think inference quota should scale with contributions too."", 'raw': ""Also, I'm totally not biased btw, but I think inference quota should scale with contributions too.""}]","I hate it when I'm debugging a space and I run out of ZeroGPU quota. 
+ + +Also, I'm totally not biased btw, but I think inference quota should scale with contributions too.",[],[],"[{'reaction': '👀', 'users': ['John6666', 'reonyy', 'den0620'], 'count': 3}]",2024-12-15 03:07:24,2024-12-15 15:26:05.843,"[{'_id': '675e479bf2b7722d13a0b65c', 'avatarUrl': '/avatars/24849c9be2c7db566bd378f41c0ab1c5.svg', 'fullname': 'Thomas Johnson', 'name': 'adminuser7652', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}, {'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}]",/posts/nroggendorff/786375380898973,2345,"{'language': 'en', 'probability': 0.9366285800933838}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f3fe13d79c1ba4c353d0c19/XswyGe3OtOdZ6g7rnrgfc.png,290.0,Ankit Pal,aaditya,208619849690381,"[{'type': 'text', 'value': 'Last Week in Medical AI: Top Research ', 'raw': 'Last Week in Medical AI: Top Research '}, {'type': 'text', 'raw': 'Papers/Models', 'value': 'Papers/Models'}, {'type': 'text', 'value': ' 🔥', 'raw': ' 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏅 (December 7 – December 14, 2024)', 'raw': '🏅 (December 7 – December 14, 2024)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLM & Other Models', 'raw': 'Medical LLM & Other Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- PediaBench: Chinese Pediatric LLM', 'raw': '- PediaBench: Chinese Pediatric LLM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Comprehensive pediatric dataset', 'raw': '- Comprehensive pediatric dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Advanced benchmarking platform', 'raw': '- Advanced benchmarking platform'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Chinese healthcare innovation', 'raw': '- Chinese healthcare innovation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- BiMediX: Bilingual Medical LLM', 'raw': '- BiMediX: Bilingual Medical LLM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multilingual medical expertise', 'raw': '- Multilingual medical expertise'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Diverse medical knowledge integration', 'raw': '- Diverse medical knowledge integration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cross-cultural healthcare insights', 'raw': '- Cross-cultural healthcare insights'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MMedPO: Vision-Language Medical LLM', 'raw': '- MMedPO: Vision-Language Medical LLM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Clinical multimodal optimization', 'raw': '- Clinical multimodal optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Advanced medical image understanding', 'raw': '- Advanced medical image understanding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 
'value': '- Precision healthcare modeling', 'raw': '- Precision healthcare modeling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Frameworks and Methodologies', 'raw': 'Frameworks and Methodologies'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- TOP-Training: Medical Q&A Framework', 'raw': '- TOP-Training: Medical Q&A Framework'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Hybrid RAG: Secure Medical Data Management', 'raw': '- Hybrid RAG: Secure Medical Data Management'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Zero-Shot ATC Clinical Coding', 'raw': '- Zero-Shot ATC Clinical Coding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Chest X-Ray Diagnosis Architecture', 'raw': '- Chest X-Ray Diagnosis Architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Medical Imaging AI Democratization', 'raw': '- Medical Imaging AI Democratization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benchmarks & Evaluations', 'raw': 'Benchmarks & Evaluations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- KorMedMCQA: Korean Healthcare Licensing Benchmark', 'raw': '- KorMedMCQA: Korean Healthcare Licensing Benchmark'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Large Language Model Medical Tasks', 'raw': '- Large Language Model Medical Tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Clinical T5 Model Performance Study', 'raw': '- Clinical T5 Model Performance Study'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Radiology Report Quality Assessment', 'raw': '- Radiology Report Quality Assessment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Genomic Analysis Benchmarking', 'raw': '- Genomic Analysis Benchmarking'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLM Applications', 'raw': 'Medical LLM Applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- BRAD: Digital Biology Language Model', 'raw': '- BRAD: Digital Biology Language Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- TCM-FTP: Herbal Prescription Prediction', 'raw': '- TCM-FTP: Herbal Prescription Prediction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- LLaSA: Activity Analysis via Sensors', 'raw': '- LLaSA: Activity Analysis via Sensors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Emergency Department Visit Predictions', 'raw': '- Emergency Department Visit Predictions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Neurodegenerative Disease AI Diagnosis', 'raw': '- Neurodegenerative Disease AI Diagnosis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Kidney Disease Explainable AI Model', 'raw': '- Kidney Disease Explainable AI Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ethical AI & Privacy', 'raw': 'Ethical AI & Privacy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Privacy-Preserving LLM Mechanisms', 'raw': '- Privacy-Preserving LLM Mechanisms'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- AI-Driven Digital Organism Modeling', 'raw': '- AI-Driven Digital Organism Modeling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Biomedical 
Research Automation', 'raw': '- Biomedical Research Automation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multimodality in Medical Practice', 'raw': '- Multimodality in Medical Practice'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full thread in detail: ', 'raw': 'Full thread in detail: '}, {'type': 'link', 'href': 'https://x.com/OpenlifesciAI/status/1867999825721242101', 'raw': 'https://x.com/OpenlifesciAI/status/1867999825721242101'}]","Last Week in Medical AI: Top Research Papers/Models 🔥 +🏅 (December 7 – December 14, 2024) + +Medical LLM & Other Models +- PediaBench: Chinese Pediatric LLM +- Comprehensive pediatric dataset +- Advanced benchmarking platform +- Chinese healthcare innovation +- BiMediX: Bilingual Medical LLM +- Multilingual medical expertise +- Diverse medical knowledge integration +- Cross-cultural healthcare insights +- MMedPO: Vision-Language Medical LLM +- Clinical multimodal optimization +- Advanced medical image understanding +- Precision healthcare modeling + +Frameworks and Methodologies +- TOP-Training: Medical Q&A Framework +- Hybrid RAG: Secure Medical Data Management +- Zero-Shot ATC Clinical Coding +- Chest X-Ray Diagnosis Architecture +- Medical Imaging AI Democratization + +Benchmarks & Evaluations +- KorMedMCQA: Korean Healthcare Licensing Benchmark +- Large Language Model Medical Tasks +- Clinical T5 Model Performance Study +- Radiology Report Quality Assessment +- Genomic Analysis Benchmarking + +Medical LLM Applications +- BRAD: Digital Biology Language Model +- TCM-FTP: Herbal Prescription Prediction +- LLaSA: Activity Analysis via Sensors +- Emergency Department Visit Predictions +- Neurodegenerative Disease AI Diagnosis +- Kidney Disease Explainable AI Model + +Ethical AI & Privacy +- Privacy-Preserving LLM Mechanisms +- AI-Driven Digital Organism Modeling +- Biomedical Research Automation +- Multimodality in Medical Practice + +Full thread in detail: https://x.com/OpenlifesciAI/status/1867999825721242101","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/JCLwDzNDXap_OYeqAYeTb.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['aaditya', 'akahana', 'ddiddi', 'AtAndDev', 'clem', 'rwightman', 'cyrilzakka', 'linoyts', 'isabelmllr'], 'count': 9}, {'reaction': '❤️', 'users': ['aaditya', 'clem', 'shetumohanto', 'isabelmllr'], 'count': 4}, {'reaction': '🚀', 'users': ['aaditya', 'John6666', 'ddiddi'], 'count': 3}, {'reaction': '🤗', 'users': ['aaditya', 'ch3nboyu'], 'count': 2}, {'reaction': '➕', 'users': ['aaditya'], 'count': 1}, {'reaction': '🧠', 'users': ['aaditya'], 'count': 1}]",2024-12-14 21:01:22,2024-12-20 12:27:45.694,"[{'_id': '63477bb66f8773f2a28daa15', 'avatarUrl': '/avatars/9a369763a73278cddcf2abcae594865d.svg', 'fullname': 'Dhruv Diddi', 'name': 'ddiddi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 13, 'isFollowing': False}, {'_id': '675c731c90080a7b45e49dd4', 'avatarUrl': '/avatars/1df9426af88bb3f23171ad91d331ec90.svg', 'fullname': 'bone', 'name': 'whalexx', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 
2489, 'isFollowing': False}, {'_id': '5f3fe13d79c1ba4c353d0c19', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f3fe13d79c1ba4c353d0c19/XswyGe3OtOdZ6g7rnrgfc.png', 'fullname': 'Ankit Pal', 'name': 'aaditya', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 290, 'isFollowing': False}]",/posts/aaditya/208619849690381,3474,"{'language': 'en', 'probability': 0.661456286907196}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,887719818242245,"[{'type': 'text', 'value': 'The #NeurIPS2024 Class: Explore which are the leading research institutions 🎓🔬', 'raw': 'The #NeurIPS2024 Class: Explore which are the leading research institutions 🎓🔬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'huggingface/open-source-ai-year-in-review-2024'}, 'url': 'https://huggingface.co/spaces/huggingface/open-source-ai-year-in-review-2024?day=13', 'raw': 'https://huggingface.co/spaces/huggingface/open-source-ai-year-in-review-2024?day=13'}]","The #NeurIPS2024 Class: Explore which are the leading research institutions 🎓🔬 + +https://huggingface.co/spaces/huggingface/open-source-ai-year-in-review-2024?day=13","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/JSJvRU1bO14D49b72Kvg3.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-12-14 19:06:16,2024-12-14 19:06:16.621,[],/posts/fdaudens/887719818242245,709,"{'language': 'en', 'probability': 0.8358685970306396}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,535147412914330,"[{'type': 'text', 'value': '📢For those who wish to quickly start with reasoning / CoT application over rows of tabular data but with minimal dependencies, this post would be valuable.', 'raw': '📢For those who wish to quickly start with reasoning / CoT application over rows of tabular data but with minimal dependencies, this post would be valuable.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔎 I found that the problem is that a bulk of Chain-of-Thought (CoT) 🔗 queries for a remotely accessed LLM 🤖 (like openrouter / Replicate / OpenAI) might result in connection loss, which may lead to exceptions 💥 and challenges with generated content restoration. ', 'raw': '🔎 I found that the problem is that a bulk of Chain-of-Thought (CoT) 🔗 queries for a remotely accessed LLM 🤖 (like openrouter / Replicate / OpenAI) might result in connection loss, which may lead to exceptions 💥 and challenges with generated content restoration. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here is where I contribute with the bulk-chain.', 'raw': 'Here is where I contribute with the bulk-chain.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⭐ ', 'raw': '⭐ '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/bulk-chain', 'raw': 'https://github.com/nicolay-r/bulk-chain'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Currently working on version 0.24.3, in which I am happy to announce the API for developing your apps that are based on CoT schema declaration in JSON (details in attached images 📸)', 'raw': 'Currently working on version 0.24.3, in which I am happy to announce the API for developing your apps that are based on CoT schema declaration in JSON (details in attached images 📸)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All you have to do is:', 'raw': 'All you have to do is:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 1. Declare CoT-schema in JSON', 'raw': '✅ 1. Declare CoT-schema in JSON'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 2. Declare the model or use the preset', 'raw': '✅ 2. Declare the model or use the preset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 3. Launch code', 'raw': '✅ 3. Launch code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'One example is to use the ReplicateIO provider:', 'raw': 'One example is to use the ReplicateIO provider:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/nicolay-r/bulk-chain/blob/master/ext/replicate.py', 'raw': 'https://github.com/nicolay-r/bulk-chain/blob/master/ext/replicate.py'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Each model has a wrapped call for inference in a try-catch block', 'raw': 'Each model has a wrapped call for inference in a try-catch block'}]","📢For those who wish to quickly start with reasoning / CoT application over rows of tabular data but with minimal dependencies, this post would be valuable. + +🔎 I found that the problem is that a bulk of Chain-of-Thought (CoT) 🔗 queries for a remotely accessed LLM 🤖 (like openrouter / Replicate / OpenAI) might result in connection loss, which may lead to exceptions 💥 and challenges with generated content restoration. + +Here is where I contribute with the bulk-chain. +⭐ https://github.com/nicolay-r/bulk-chain + +Currently working on version 0.24.3, in which I am happy to announce the API for developing your apps that are based on CoT schema declaration in JSON (details in attached images 📸) + +All you have to do is: +✅ 1. Declare CoT-schema in JSON +✅ 2. Declare the model or use the preset +✅ 3. 
Launch code + +One example is to use ReplicateIO provider: +https://github.com/nicolay-r/bulk-chain/blob/master/ext/replicate.py + +Each model has a wrapped call for inference in try-catch block","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/TeUec-vUiXg2ugfkj1Uly.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/jFi13zyca1RwGZ3lHs2mO.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'csabakecskemeti', 'AtAndDev'], 'count': 3}]",2024-12-14 18:13:33,2024-12-14 18:17:04.808,[],/posts/nicolay-r/535147412914330,1936,"{'language': 'en', 'probability': 0.8964587450027466}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/669c89e98f2dbc203f9e74ab/higvnXEHeo_Ig2bgTpn47.png,41.0,Vincent Granville,vincentg64,325757117778274,"[{'type': 'text', 'value': 'Where LLMs Fail the Most, and How to Fix it ', 'raw': 'Where LLMs Fail the Most, and How to Fix it '}, {'type': 'link', 'href': 'https://mltblog.com/41BcGDY', 'raw': 'https://mltblog.com/41BcGDY'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here I illustrate my two most recent interactions with AI-powered GPT. It was an awful failure, a lot worse than before GenAI. Indeed, I had to revert back to old Google search to get help. This is typical of what hundreds of millions of users now experience every day. ', 'raw': 'Here I illustrate my two most recent interactions with AI-powered GPT. It was an awful failure, a lot worse than before GenAI. Indeed, I had to revert back to old Google search to get help. This is typical of what hundreds of millions of users now experience every day. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ First example: ', 'raw': '➡️ First example: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I get payments from Stripe. I asked how I can pay someone, as opposed to getting paid, as I had a contact asking me to pay him with Stripe. After 30 mins of prompts to AI support, I got nowhere. In the end I decided to pay my contact using a different platform. I could not figure out how to a meaningful answer: see featured image. ', 'raw': 'I get payments from Stripe. I asked how I can pay someone, as opposed to getting paid, as I had a contact asking me to pay him with Stripe. After 30 mins of prompts to AI support, I got nowhere. In the end I decided to pay my contact using a different platform. I could not figure out how to a meaningful answer: see featured image. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Second example: ', 'raw': '➡️ Second example: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A VC guy I started to interact with sent me a few messages, but I never received any of them. I tried to contact my email provider, but was faced with a GenAI bot to answer the following precise question: his email address is xyz, mine is abc, his messages do not even show up in my spam box, and I did not block their domain name; how to fix this? After receiving irrelevant answers, I ask point blank: can I chat with a real human? Again, irrelevant answers, no matter how I phrase my question. 
In the end I told my contact to send messages to an alternate email address.', 'raw': 'A VC guy I started to interact with sent me a few messages, but I never received any of them. I tried to contact my email provider, but was faced with a GenAI bot to answer the following precise question: his email address is xyz, mine is abc, his messages do not even show up in my spam box, and I did not block their domain name; how to fix this? After receiving irrelevant answers, I asked point blank: can I chat with a real human? Again, irrelevant answers, no matter how I phrased my question. In the end I told my contact to send messages to an alternate email address.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Read the article explaining causes, offering solutions, at ', 'raw': '➡️ Read the article explaining causes, offering solutions, at '}, {'type': 'link', 'href': 'https://mltblog.com/41BcGDY', 'raw': 'https://mltblog.com/41BcGDY'}]","Where LLMs Fail the Most, and How to Fix it https://mltblog.com/41BcGDY + +Here I illustrate my two most recent interactions with AI-powered GPT. It was an awful failure, a lot worse than before GenAI. Indeed, I had to revert to old Google search to get help. This is typical of what hundreds of millions of users now experience every day. + +➡️ First example: + +I get payments from Stripe. I asked how I can pay someone, as opposed to getting paid, as I had a contact asking me to pay him with Stripe. After 30 mins of prompts to AI support, I got nowhere. In the end I decided to pay my contact using a different platform. I could not figure out how to get a meaningful answer: see featured image. + +➡️ Second example: + +A VC guy I started to interact with sent me a few messages, but I never received any of them. I tried to contact my email provider, but was faced with a GenAI bot to answer the following precise question: his email address is xyz, mine is abc, his messages do not even show up in my spam box, and I did not block their domain name; how to fix this? After receiving irrelevant answers, I asked point blank: can I chat with a real human? Again, irrelevant answers, no matter how I phrased my question. In the end I told my contact to send messages to an alternate email address. + +➡️ Read the article explaining causes, offering solutions, at https://mltblog.com/41BcGDY","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/669c89e98f2dbc203f9e74ab/zhBOlgwwKOQzJgHnZtcaU.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'reonyy'], 'count': 2}, {'reaction': '👍', 'users': ['vetrivelcsamy'], 'count': 1}]",2024-12-14 17:40:06,2024-12-14 17:40:06.679,[],/posts/vincentg64/325757117778274,1832,"{'language': 'en', 'probability': 0.9760580658912659}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,558280034140218,"[{'type': 'text', 'value': 'For those Game Developers out there who want a tool to generate 3D assets for different game items. I built something for you 😅', 'raw': 'For those Game Developers out there who want a tool to generate 3D assets for different game items. 
I built something for you 😅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/JeffreyXiang/TRELLIS-image-large', 'raw': 'https://huggingface.co/JeffreyXiang/TRELLIS-image-large'}, {'type': 'text', 'value': ' +', 'raw': ' +'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Qwen/Qwen2.5-72B-Instruct'}, 'url': 'https://huggingface.co/Qwen/Qwen2.5-72B-Instruct', 'raw': 'https://huggingface.co/Qwen/Qwen2.5-72B-Instruct'}, {'type': 'text', 'value': ' +', 'raw': ' +'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Freepik/flux.1-lite-8B-alpha'}, 'url': 'https://huggingface.co/Freepik/flux.1-lite-8B-alpha', 'raw': 'https://huggingface.co/Freepik/flux.1-lite-8B-alpha'}, {'type': 'text', 'value': ' =', 'raw': ' ='}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MohamedRashad/Game-Items-Generator'}, 'url': 'https://huggingface.co/spaces/MohamedRashad/Game-Items-Generator', 'raw': 'https://huggingface.co/spaces/MohamedRashad/Game-Items-Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Happy building 🎉', 'raw': 'Happy building 🎉'}]","For those Game Developers out there who want a tool to generate 3D assets for different game items. I built something for you 😅 + +https://huggingface.co/JeffreyXiang/TRELLIS-image-large + +https://huggingface.co/Qwen/Qwen2.5-72B-Instruct + +https://huggingface.co/Freepik/flux.1-lite-8B-alpha = +https://huggingface.co/spaces/MohamedRashad/Game-Items-Generator + +Happy building 🎉",[],[],"[{'reaction': '🔥', 'users': ['MohamedRashad', 'John6666', 'reonyy', 'aurelben', 'shawon', 'HxLL', 'victor', 'Sylvestre'], 'count': 8}, {'reaction': '👍', 'users': ['Arouj', 'reonyy', 'HxLL', 'hrerid3', 'gentlebowl'], 'count': 5}]",2024-12-14 09:10:07,2024-12-20 18:48:30.106,"[{'_id': '675eede6e24babdf18d6f7e4', 'avatarUrl': '/avatars/1227840a1e860a584400f2de54d5e34a.svg', 'fullname': 'miyexe pokeline', 'name': 'miyexe', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/MohamedRashad/558280034140218,2819,"{'language': 'en', 'probability': 0.7611050009727478}",1 +/avatars/a45d25cafbb39b1147a694643d17799e.svg,123.0,master,fantos,808478576260243,"[{'type': 'text', 'value': '🎄 X-MAS FLUX LoRA: Bring the Spirit of Christmas to Life!', 'raw': '🎄 X-MAS FLUX LoRA: Bring the Spirit of Christmas to Life!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'X-MAS FLUX LoRA is your ultimate AI tool for creating breathtaking Christmas-themed images.', 'raw': 'X-MAS FLUX LoRA is your ultimate AI tool for creating breathtaking Christmas-themed images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With a specialized LoRA model trained on diverse X-MAS imagery and powered by the FLUX model, this service offers unmatched quality and creativity for generating stunning Christmas visuals.', 'raw': 'With a specialized LoRA model trained on diverse X-MAS imagery and powered by the FLUX model, this service offers unmatched quality and creativity for generating stunning Christmas visuals.'}, {'type': 'new_line', 
'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is more than just an AI image generator—it’s a tool that transforms your Christmas imagination into vivid reality. Whether it’s a cozy snowy cabin, a twinkling Christmas tree, or a serene winter landscape, you can create it all with just a few descriptive prompts. Best of all, it supports both Korean and English, ensuring accessibility for everyone.', 'raw': 'This is more than just an AI image generator—it’s a tool that transforms your Christmas imagination into vivid reality. Whether it’s a cozy snowy cabin, a twinkling Christmas tree, or a serene winter landscape, you can create it all with just a few descriptive prompts. Best of all, it supports both Korean and English, ensuring accessibility for everyone.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎁 Why Choose X-MAS FLUX LoRA?', 'raw': '🎁 Why Choose X-MAS FLUX LoRA?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ LoRA Trained for X-MAS Excellence: Generate one-of-a-kind, emotion-filled Christmas images with a model fine-tuned for X-MAS themes.', 'raw': '1️⃣ LoRA Trained for X-MAS Excellence: Generate one-of-a-kind, emotion-filled Christmas images with a model fine-tuned for X-MAS themes.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Powered by FLUX for Exceptional Quality: Enjoy enhanced detail, color richness, and overall brilliance in every generated image.', 'raw': '2️⃣ Powered by FLUX for Exceptional Quality: Enjoy enhanced detail, color richness, and overall brilliance in every generated image.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Fully Customizable Options: Adjust resolution, seed, LoRA scale, and more to achieve your desired image style.', 'raw': '3️⃣ Fully Customizable Options: Adjust resolution, seed, LoRA scale, and more to achieve your desired image style.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4️⃣ User-Friendly Interface: Navigate effortlessly with an intuitive Gradio UI, complete with falling snow effects and festive background music.', 'raw': '4️⃣ User-Friendly Interface: Navigate effortlessly with an intuitive Gradio UI, complete with falling snow effects and festive background music.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎄 How Can This Service Benefit You?', 'raw': '🎄 How Can This Service Benefit You?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Personal Enjoyment: Create unique Christmas cards, profile pictures, or cherished keepsakes.', 'raw': 'Personal Enjoyment: Create unique Christmas cards, profile pictures, or cherished keepsakes.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Creative Inspiration: Enhance your social media, blog, or design projects with captivating, custom visuals.', 'raw': 'Creative Inspiration: Enhance your social media, blog, or design projects with captivating, custom visuals.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Perfect for Christmas Content: Whether it’s digital art, event posters, or thematic designs, this tool meets all your holiday creative needs.', 'raw': 'Perfect for Christmas Content: Whether it’s digital art, event posters, or thematic designs, this tool meets all your holiday creative needs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Let your imagination shine this 
Christmas with X-MAS FLUX LoRA. Start creating your magical X-MAS images today! 🚀 ', 'raw': 'Let your imagination shine this Christmas with X-MAS FLUX LoRA. Start creating your magical X-MAS images today! 🚀 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'fantos/x-mas'}, 'url': 'https://huggingface.co/spaces/fantos/x-mas', 'raw': 'https://huggingface.co/spaces/fantos/x-mas'}]","🎄 X-MAS FLUX LoRA: Bring the Spirit of Christmas to Life! + +X-MAS FLUX LoRA is your ultimate AI tool for creating breathtaking Christmas-themed images. +With a specialized LoRA model trained on diverse X-MAS imagery and powered by the FLUX model, this service offers unmatched quality and creativity for generating stunning Christmas visuals. + +This is more than just an AI image generator—it’s a tool that transforms your Christmas imagination into vivid reality. Whether it’s a cozy snowy cabin, a twinkling Christmas tree, or a serene winter landscape, you can create it all with just a few descriptive prompts. Best of all, it supports both Korean and English, ensuring accessibility for everyone. + +🎁 Why Choose X-MAS FLUX LoRA? +1️⃣ LoRA Trained for X-MAS Excellence: Generate one-of-a-kind, emotion-filled Christmas images with a model fine-tuned for X-MAS themes. +2️⃣ Powered by FLUX for Exceptional Quality: Enjoy enhanced detail, color richness, and overall brilliance in every generated image. +3️⃣ Fully Customizable Options: Adjust resolution, seed, LoRA scale, and more to achieve your desired image style. +4️⃣ User-Friendly Interface: Navigate effortlessly with an intuitive Gradio UI, complete with falling snow effects and festive background music. + +🎄 How Can This Service Benefit You? + +Personal Enjoyment: Create unique Christmas cards, profile pictures, or cherished keepsakes. +Creative Inspiration: Enhance your social media, blog, or design projects with captivating, custom visuals. +Perfect for Christmas Content: Whether it’s digital art, event posters, or thematic designs, this tool meets all your holiday creative needs. +Let your imagination shine this Christmas with X-MAS FLUX LoRA. Start creating your magical X-MAS images today! 
🚀 https://huggingface.co/spaces/fantos/x-mas","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/659f6dc8b2ac16613528c836/7jKvYZuQvw-1f3DAOqtZg.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/659f6dc8b2ac16613528c836/LtM947r49KTriAWHcaKPQ.webp'}]",[],"[{'reaction': '🚀', 'users': ['fantos', 'BBiGGU', 'John6666', 'reonyy', 'openfree', 'Renaldhach', 'lutfullaevrakhmiddin'], 'count': 7}, {'reaction': '❤️', 'users': ['fantos', 'BBiGGU', 'reonyy', 'openfree', 'pixelcoder'], 'count': 5}, {'reaction': '😎', 'users': ['fantos', 'BBiGGU', 'openfree', 'Sebastjano'], 'count': 4}, {'reaction': '🤗', 'users': ['fantos', 'BBiGGU', 'openfree', 'Chroma111'], 'count': 4}, {'reaction': '🔥', 'users': ['fantos', 'BBiGGU', 'openfree'], 'count': 3}, {'reaction': '👀', 'users': ['fantos', 'BBiGGU', 'openfree'], 'count': 3}, {'reaction': '🧠', 'users': ['fantos', 'openfree'], 'count': 2}, {'reaction': '➕', 'users': ['fantos', 'openfree'], 'count': 2}, {'reaction': '🤯', 'users': ['fantos', 'openfree'], 'count': 2}, {'reaction': '😔', 'users': ['fantos', 'openfree'], 'count': 2}, {'reaction': '🤝', 'users': ['fantos', 'openfree'], 'count': 2}, {'reaction': '👍', 'users': ['fantos', 'openfree'], 'count': 2}]",2024-12-14 09:04:54,2024-12-15 19:44:43.437,"[{'_id': '675f2e8afeda71241c172663', 'avatarUrl': '/avatars/fd8910efcadd519182bfa3840eb81e18.svg', 'fullname': 'Rakhmiddin', 'name': 'lutfullaevrakhmiddin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/fantos/808478576260243,2455,"{'language': 'en', 'probability': 0.8456214666366577}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/630f3058236215d0b7078806/TRTdqAZpT1bJg_RvGgxlg.jpeg,29.0,Tarun Jain,lucifertrj,929919437962509,"[{'type': 'text', 'value': 'Image Prompt Engineering Guide:', 'raw': 'Image Prompt Engineering Guide:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Artistic styling for Image generation', 'raw': '➡️ Artistic styling for Image generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Prompt weighting using the parentheses method to generate realistic images. ', 'raw': '➡️ Prompt weighting using the parentheses method to generate realistic images. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Advanced features like style and positioning control[experimental].', 'raw': '➡️ Advanced features like style and positioning control[experimental].'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Image placement on the generated AI image using Recraft V3 Mockup.', 'raw': '➡️ Image placement on the generated AI image using Recraft V3 Mockup.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Watch: ', 'raw': 'Watch: '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=d3nUG28-jIc', 'raw': 'https://www.youtube.com/watch?v=d3nUG28-jIc'}]","Image Prompt Engineering Guide: +➡️ Artistic styling for Image generation +➡️ Prompt weighting using the parentheses method to generate realistic images. +➡️ Advanced features like style and positioning control[experimental]. +➡️ Image placement on the generated AI image using Recraft V3 Mockup. 
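To make the parentheses method above concrete: many text-to-image front ends accept numeric weights attached to parenthesized phrases. The (phrase:weight) syntax shown here is a common UI convention and an illustrative assumption; the exact syntax depends on the tool you use.

```python
# Illustrative parentheses-weighted prompt strings; weights above 1.0 boost a
# phrase and weights below 1.0 de-emphasize it. Syntax varies across tools.
base_prompt = "portrait photo of an astronaut, studio lighting"

weighted_prompt = (
    "(portrait photo:1.2) of an astronaut, "
    "(studio lighting:1.3), (cartoon:0.5)"
)
print(weighted_prompt)
```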
+ +Watch: https://www.youtube.com/watch?v=d3nUG28-jIc","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/630f3058236215d0b7078806/8GQ3nF8cWdFxM3QvIbmk0.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 's3nh'], 'count': 2}]",2024-12-14 07:51:08,2024-12-14 07:51:08.934,[],/posts/lucifertrj/929919437962509,570,"{'language': 'en', 'probability': 0.6409399509429932}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,517426698045309,"[{'type': 'text', 'value': ""In Honour of This Year's NeurIPS Test of Time Paper Awardees"", 'raw': ""In Honour of This Year's NeurIPS Test of Time Paper Awardees""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This year's NeurIPS Test of Time Paper Awards went to two groundbreaking papers:"", 'raw': ""This year's NeurIPS Test of Time Paper Awards went to two groundbreaking papers:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Generative Adversarial Nets (Goodfellow et al)', 'raw': '1. Generative Adversarial Nets (Goodfellow et al)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Sequence to Sequence Learning with Neural Networks (Ilya et al)', 'raw': '2. Sequence to Sequence Learning with Neural Networks (Ilya et al)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Let's explore how these papers helped pioneer breakthroughs in today's AI:"", 'raw': ""Let's explore how these papers helped pioneer breakthroughs in today's AI:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full Article: ', 'raw': 'Full Article: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/Jaward/nip', 'raw': 'https://huggingface.co/blog/Jaward/nip'}]","In Honour of This Year's NeurIPS Test of Time Paper Awardees +This year's NeurIPS Test of Time Paper Awards went to two groundbreaking papers: +1. Generative Adversarial Nets (Goodfellow et al) +2. Sequence to Sequence Learning with Neural Networks (Ilya et al) +Let's explore how these papers helped pioneer breakthroughs in today's AI: + +Full Article: https://huggingface.co/blog/Jaward/nip","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/qDP5OnM0JKD-nsL1I1wwK.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/q8bW_7jBejrpqiTKksMXV.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/mRMDdxHDk6FkjGiIY1I2l.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/jJC3hUfgfFrBUB_zyal4p.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/GMEDP25zelWR4a7TvdYRS.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/6tOHfRMahsMb2fsq0pr9e.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/YE-3wopOSOJPrVUrXxizq.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-12-10 03:50:34,2024-12-10 03:50:34.149,[],/posts/Jaward/517426698045309,616,"{'language': 'en', 'probability': 0.8445082306861877}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/661bae819fbab39d0e591dc9/WsWjV9q108BsjGM-pS7Uh.png,174.0,AIQ,aiqtech,556334148625802,"[{'type': 'text', 'value': '🎨 SORA 3D: Create 3D Models from Text and Images', 'raw': '🎨 SORA 3D: Create 3D Models from Text and Images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Hey there! Today I'm excited to share 'SORA 3D', a project that generates 3D models from text prompts or images."", 'raw': ""Hey there! 
Today I'm excited to share 'SORA 3D', a project that generates 3D models from text prompts or images.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Key Features', 'raw': '✨ Key Features'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3D generation from text/image input', 'raw': '3D generation from text/image input'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Multilingual prompt support', 'raw': 'Multilingual prompt support'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Automatic GLB conversion', 'raw': 'Automatic GLB conversion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Real-time 3D preview', 'raw': 'Real-time 3D preview'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mesh optimization & texture quality control', 'raw': 'Mesh optimization & texture quality control'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 How to Use', 'raw': '🚀 How to Use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enter text or upload image', 'raw': 'Enter text or upload image'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Adjust generation settings (optional)', 'raw': 'Adjust generation settings (optional)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Click 'Generate 3D'"", 'raw': ""Click 'Generate 3D'""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Extract and download GLB file', 'raw': 'Extract and download GLB file'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠 Tech Stack', 'raw': '🛠 Tech Stack'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hugging Face Transformers', 'raw': 'Hugging Face Transformers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PyTorch', 'raw': 'PyTorch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Gradio', 'raw': 'Gradio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'TRELLIS image-to-3D conversion', 'raw': 'TRELLIS image-to-3D conversion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'FLUX image generation', 'raw': 'FLUX image generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 Use Cases', 'raw': '💡 Use Cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Game asset creation', 'raw': 'Game asset creation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Metaverse content', 'raw': 'Metaverse content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Product prototyping', 'raw': 'Product prototyping'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Educational 3D models', 'raw': 'Educational 3D models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LINK: ', 'raw': 'LINK: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginipick/SORA-3D'}, 'url': 'https://huggingface.co/spaces/ginipick/SORA-3D', 'raw': 'https://huggingface.co/spaces/ginipick/SORA-3D'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try the demo and let me know what you think! 😊', 'raw': 'Try the demo and let me know what you think! 
😊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #3DGeneration #MachineLearning #HuggingFace #ComputerVision', 'raw': '#AI #3DGeneration #MachineLearning #HuggingFace #ComputerVision'}]","🎨 SORA 3D: Create 3D Models from Text and Images +Hey there! Today I'm excited to share 'SORA 3D', a project that generates 3D models from text prompts or images. +✨ Key Features + +3D generation from text/image input +Multilingual prompt support +Automatic GLB conversion +Real-time 3D preview +Mesh optimization & texture quality control + +🚀 How to Use + +Enter text or upload image +Adjust generation settings (optional) +Click 'Generate 3D' +Extract and download GLB file + +🛠 Tech Stack + +Hugging Face Transformers +PyTorch +Gradio +TRELLIS image-to-3D conversion +FLUX image generation + +💡 Use Cases + +Game asset creation +Metaverse content +Product prototyping +Educational 3D models + +LINK: https://huggingface.co/spaces/ginipick/SORA-3D + +Try the demo and let me know what you think! 😊 +#AI #3DGeneration #MachineLearning #HuggingFace #ComputerVision","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/5ByFSuQyCYfZrJ3dAo2ZP.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/3bCbc7v5sF3WxD9YaSxHB.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/g55840IFbcGkeobjU8Y0X.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/SXTtsd4399QJiGF6kBCkf.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/kEoFlDme0xfJJhnz60InU.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/YLXyRMjh5V2HvZRp8Omw3.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/1elBGDkBB_TCg0qB_nP-l.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/SNB5pVwD9_9SVLBkasNSf.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/NGDutt2s4MIFYoPpt3BDN.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/FvNU2LRQjrnPl_0KapmGV.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/661bae819fbab39d0e591dc9/_4Ew_qBzpiLU6914_-0-h.webp'}]",[],"[{'reaction': '🔥', 'users': ['aiqtech', 'seawolf2357', 'ginipick', 'yaop', 'John6666', 'clem', 'AtAndDev', 'TDN007', 'anurag2506', 'AztecGod', 'luh1124'], 'count': 11}, {'reaction': '🚀', 'users': ['aiqtech', 'seawolf2357', 'ginipick', 'John6666', 'clem'], 'count': 5}, {'reaction': '👀', 'users': ['aiqtech', 'seawolf2357', 'nofl', 'John6666', 'casey-s'], 'count': 5}, {'reaction': '👍', 'users': ['aiqtech', 'seawolf2357', 'Ww1992', 'hoanganh25991', 'apurvacca'], 'count': 5}, {'reaction': '❤️', 'users': ['aiqtech', 'seawolf2357', 'ginipick', 'Rsln'], 'count': 4}, {'reaction': '🤗', 'users': ['aiqtech', 'seawolf2357', 'ginipick'], 'count': 3}, {'reaction': '😎', 'users': ['aiqtech', 'seawolf2357', 'ginipick'], 'count': 3}, {'reaction': '➕', 'users': ['aiqtech', 'seawolf2357', 'ginipick'], 'count': 3}, {'reaction': '🧠', 'users': ['aiqtech', 'seawolf2357'], 'count': 2}, {'reaction': '🤝', 'users': ['aiqtech', 'seawolf2357'], 'count': 2}, {'reaction': '😔', 'users': 
['ginipick'], 'count': 1}]",2024-12-10 02:36:19,2024-12-11 00:09:43.305,"[{'_id': '67571d9db7e7a3a88b1664cf', 'avatarUrl': '/avatars/1c036dffcd9ff06d03eebb21716f69d2.svg', 'fullname': 'Usman Ayaz', 'name': 'usmanayaz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '631b80458de8e645af6dc4be', 'avatarUrl': '/avatars/d97b7812ca153a191b80cdcf418c3641.svg', 'fullname': 'Chris Morin', 'name': 'chrismorin', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '643b19f8a856622f978df30f', 'avatarUrl': '/avatars/c82779fdf94f80cdb5020504f83c818b.svg', 'fullname': 'Yatharth Sharma', 'name': 'YaTharThShaRma999', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 20, 'isFollowing': False}, {'_id': '6758d625304580a1f079a077', 'avatarUrl': '/avatars/abb2be2b32f0977efdb66b6c51bcb2bb.svg', 'fullname': 'MAHASTI', 'name': 'Mahasti', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/aiqtech/556334148625802,3960,"{'language': 'en', 'probability': 0.7068600058555603}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,323402002619090,"[{'type': 'text', 'value': 'ChemVLM has been accepted by AAAI2025!', 'raw': 'ChemVLM has been accepted by AAAI2025!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2408.07246'}, 'url': 'https://huggingface.co/papers/2408.07246', 'raw': 'https://huggingface.co/papers/2408.07246', 'label': 'Seeing and Understanding: Bridging Vision with Chemical Knowledge Via\n ChemVLM (2408.07246)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try having a chat with him 🤗.', 'raw': 'Try having a chat with him 🤗.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'AI4Chem/ChemVLM-26B-1-2'}, 'url': 'https://huggingface.co/AI4Chem/ChemVLM-26B-1-2', 'raw': 'https://huggingface.co/AI4Chem/ChemVLM-26B-1-2'}]","ChemVLM has been accepted by AAAI2025! +https://huggingface.co/papers/2408.07246 +Try having a chat with him 🤗. 
+https://huggingface.co/AI4Chem/ChemVLM-26B-1-2",[],[],"[{'reaction': '🚀', 'users': ['jwu323', 'John6666', 'clem', 'huaXiaKyrie'], 'count': 4}]",2024-12-09 23:48:47,2024-12-09 23:48:47.513,[],/posts/di-zhang-fdu/323402002619090,1854,"{'language': 'en', 'probability': 0.8699609637260437}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,614999106322864,"[{'type': 'text', 'value': 'Last week was crazy in OS AI, with important models and datasets releases every day.', 'raw': 'Last week was crazy in OS AI, with important models and datasets releases every day.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here are the most important ones I've pinned:"", 'raw': ""Here are the most important ones I've pinned:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🌎 Cohere released Global-MMLU, a multilingual version of MMLU, to evaluate AI models' world knowledge in many languages!"", 'raw': ""🌎 Cohere released Global-MMLU, a multilingual version of MMLU, to evaluate AI models' world knowledge in many languages!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🦙 Meta released Llama-3.3-70B-Instruct, a 70B model that's on par with Llama-3.1-405B-Instruct, GPT-4o and Claude. Probably my new go-to for agentic workflows."", 'raw': ""🦙 Meta released Llama-3.3-70B-Instruct, a 70B model that's on par with Llama-3.1-405B-Instruct, GPT-4o and Claude. Probably my new go-to for agentic workflows.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔉 FishAudio released fish-speech-1.5, a multilingual text-to-speech model', 'raw': '🔉 FishAudio released fish-speech-1.5, a multilingual text-to-speech model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎨 Microsoft Research released TRELLIS, an extremely impressive image-to-3D model, which you can try here: ', 'raw': '🎨 Microsoft Research released TRELLIS, an extremely impressive image-to-3D model, which you can try here: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/JeffreyXiang/TRELLIS', 'raw': 'https://huggingface.co/spaces/JeffreyXiang/TRELLIS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 Yesterday, Hugging Face released FineWeb 2, a new version that extends the previous FineWeb to over 1000 languages, including extended coverage in Russian, Mandarin, German, Japanese, Spanish, French, so a huge, high-quality dataset of > 3 trillion words! ', 'raw': '📚 Yesterday, Hugging Face released FineWeb 2, a new version that extends the previous FineWeb to over 1000 languages, including extended coverage in Russian, Mandarin, German, Japanese, Spanish, French, so a huge, high-quality dataset of > 3 trillion words! 
'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HuggingFaceFW/fineweb-2'}, 'url': 'https://huggingface.co/datasets/HuggingFaceFW/fineweb-2', 'raw': 'https://huggingface.co/datasets/HuggingFaceFW/fineweb-2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Now let's go build to make this week as productive as the last one!"", 'raw': ""Now let's go build to make this week as productive as the last one!""}]","Last week was crazy in OS AI, with important models and datasets releases every day. + +Here are the most important ones I've pinned: + +🌎 Cohere released Global-MMLU, a multilingual version of MMLU, to evaluate AI models' world knowledge in many languages! + +🦙 Meta released Llama-3.3-70B-Instruct, a 70B model that's on par with Llama-3.1-405B-Instruct, GPT-4o and Claude. Probably my new go-to for agentic workflows. + +🔉 FishAudio released fish-speech-1.5, a multilingual text-to-speech model + +🎨 Microsoft Research released TRELLIS, an extremely impressive image-to-3D model, which you can try here: https://huggingface.co/spaces/JeffreyXiang/TRELLIS + +📚 Yesterday, Hugging Face released FineWeb 2, a new version that extends the previous FineWeb to over 1000 languages, including extended coverage in Russian, Mandarin, German, Japanese, Spanish, French, so a huge, high-quality dataset of > 3 trillion words! https://huggingface.co/datasets/HuggingFaceFW/fineweb-2 + +Now let's go build to make this week as productive as the last one!",[],[],"[{'reaction': '🔥', 'users': ['AdinaY', 'SaahithiMall', 'John6666', 'daniel-ltw', 'LiraMirui', 'orion774', 'clem', 'madoss', 'vivirocks'], 'count': 9}]",2024-12-09 16:47:36,2024-12-09 16:47:36.254,[],/posts/m-ric/614999106322864,2265,"{'language': 'en', 'probability': 0.8780655264854431}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,152685827721007,"[{'type': 'text', 'value': 'Open Preference Dataset for Text-to-Image Generation by the 🤗 Community', 'raw': 'Open Preference Dataset for Text-to-Image Generation by the 🤗 Community'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Open Image Preferences is an Apache 2.0 licensed dataset for text-to-image generation. This dataset contains 10K text-to-image preference pairs across common image generation categories, while using different model families and varying prompt complexities. ', 'raw': 'Open Image Preferences is an Apache 2.0 licensed dataset for text-to-image generation. This dataset contains 10K text-to-image preference pairs across common image generation categories, while using different model families and varying prompt complexities. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/image-preferences', 'raw': 'https://huggingface.co/blog/image-preferences'}, {'type': 'new_line', 'raw': '\n'}]","Open Preference Dataset for Text-to-Image Generation by the 🤗 Community + +Open Image Preferences is an Apache 2.0 licensed dataset for text-to-image generation. This dataset contains 10K text-to-image preference pairs across common image generation categories, while using different model families and varying prompt complexities. 
+ +https://huggingface.co/blog/image-preferences +",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'Eyel', 'AdinaY', 'davanstrien', 'grib0ed0v', 'clem'], 'count': 6}]",2024-12-09 15:13:25,2024-12-09 15:13:25.074,[],/posts/davidberenstein1957/152685827721007,2089,"{'language': 'en', 'probability': 0.747958242893219}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg,107.0,Sami Halawa,samihalawa,673800559536116,"[{'type': 'text', 'value': '#Perfect final debug prompt:', 'raw': '#Perfect final debug prompt:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Step 1: generate the optimal prompt that, if sent to you, will make you output a complete, fully working, perfect UX/UI production-ready version of the script', 'raw': 'Step 1: generate the optimal prompt that, if sent to you, will make you output a complete, fully working, perfect UX/UI production-ready version of the script'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Step 2: follow the instructions yourself and output the final script', 'raw': 'Step 2: follow the instructions yourself and output the final script'}]","#Perfect final debug prompt: +Step 1: generate the optimal prompt that, if sent to you, will make you output a complete, fully working, perfect UX/UI production-ready version of the script +Step 2: follow the instructions yourself and output the final script",[],[],"[{'reaction': '👀', 'users': ['John6666', 'vetrivelcsamy'], 'count': 2}, {'reaction': '😔', 'users': ['victor'], 'count': 1}]",2024-12-09 13:38:37,2024-12-09 13:38:37.008,[],/posts/samihalawa/673800559536116,1604,"{'language': 'en', 'probability': 0.3202989399433136}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/665f02b35dbb28742489d3b1/JVK7VKRWI6wJwX2uKKV3F.png,70.0,Eric Chung,DawnC,953555970302299,"[{'type': 'text', 'value': '💡 Curious about dog breeds? 🐕 Meet PawMatchAI!', 'raw': '💡 Curious about dog breeds? 🐕 Meet PawMatchAI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I’ve created this fun and interactive project to help you recognize dog breeds, find the perfect pup for your lifestyle, and even compare different breeds! 🐾', 'raw': 'I’ve created this fun and interactive project to help you recognize dog breeds, find the perfect pup for your lifestyle, and even compare different breeds! 🐾'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟 Why not give it a try?', 'raw': '🌟 Why not give it a try?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Whether you’re a dog lover or just curious, PawMatchAI is here to make discovering breeds easier and more enjoyable.', 'raw': 'Whether you’re a dog lover or just curious, PawMatchAI is here to make discovering breeds easier and more enjoyable.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔎 Got feedback?', 'raw': '🔎 Got feedback?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'While it’s not flawless, your input can help make it better. 
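Cleaned up, the debug-prompt post above is a two-step meta-prompting trick: first ask the model to write the optimal prompt for the task, then feed that prompt back to the model. A minimal sketch, assuming a hypothetical `chat()` helper standing in for any chat-completion API:

```python
# Two-step meta-prompting sketch; `chat` is a hypothetical stand-in for
# whatever chat-completion client you actually use.
def chat(prompt: str) -> str:
    # Wire up your LLM provider here; this stub only echoes for demonstration.
    return f"[model output for: {prompt[:40]}...]"

script = "print('hello')"  # the script you want made production-ready

# Step 1: ask the model to generate the optimal prompt.
meta_prompt = chat(
    "Generate the optimal prompt that, if sent to you, would make you output "
    "a complete, fully working, production-ready version of this script:\n\n" + script
)

# Step 2: follow that prompt yourself and output the final script.
final_script = chat(meta_prompt)
print(final_script)
```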
I’d love to hear your thoughts as I keep improving the project!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Explore now: ', 'raw': '👉 Explore now: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DawnC/PawMatchAI'}, 'url': 'https://huggingface.co/spaces/DawnC/PawMatchAI', 'raw': 'https://huggingface.co/spaces/DawnC/PawMatchAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Your support matters!', 'raw': '🎯 Your support matters!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Every like 👍 or comment 📝 motivates me as I continue my journey toward mastering AI. Let’s make learning fun and impactful together!', 'raw': 'Every like 👍 or comment 📝 motivates me as I continue my journey toward mastering AI. Let’s make learning fun and impactful together!'}]","💡 Curious about dog breeds? 🐕 Meet PawMatchAI! +I’ve created this fun and interactive project to help you recognize dog breeds, find the perfect pup for your lifestyle, and even compare different breeds! 🐾 + +🌟 Why not give it a try? +Whether you’re a dog lover or just curious, PawMatchAI is here to make discovering breeds easier and more enjoyable. + +🔎 Got feedback? +While it’s not flawless, your input can help make it better. I’d love to hear your thoughts as I keep improving the project! + +👉 Explore now: +https://huggingface.co/spaces/DawnC/PawMatchAI + +🎯 Your support matters! +Every like 👍 or comment 📝 motivates me as I continue my journey toward mastering AI. Let’s make learning fun and impactful together!",[],[],"[{'reaction': '👍', 'users': ['John6666', 'victor', 'evalstate'], 'count': 3}]",2024-12-09 12:58:58,2024-12-09 12:58:58.559,[],/posts/DawnC/953555970302299,1573,"{'language': 'en', 'probability': 0.8574956059455872}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,848054673912163,"[{'type': 'text', 'value': '✈️ Aircraft Dataset & Generation Model ', 'raw': '✈️ Aircraft Dataset & Generation Model '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/aircraft-images'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/aircraft-images', 'raw': 'https://huggingface.co/datasets/nyuuzyou/aircraft-images'}, {'type': 'text', 'value': ' & ', 'raw': ' & '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nyuuzyou/AircraftFLUX-LoRA'}, 'url': 'https://huggingface.co/nyuuzyou/AircraftFLUX-LoRA', 'raw': 'https://huggingface.co/nyuuzyou/AircraftFLUX-LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset Features:', 'raw': 'Dataset Features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 165,340 high-res aircraft images with metadata', 'raw': '• 165,340 high-res aircraft images with metadata'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Machine-generated English captions', 'raw': '• Machine-generated English captions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Detailed aircraft specs, registration & flight info', 'raw': '• Detailed aircraft specs, registration & flight info'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Environmental context descriptions', 'raw': '• Environmental context 
descriptions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LoRA model specializes in:', 'raw': 'LoRA model specializes in:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Realistic aircraft generation', 'raw': '• Realistic aircraft generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Accurate technical details for unpopular airplanes compared to ', 'raw': '• Accurate technical details for unpopular airplanes compared to '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'black-forest-labs/FLUX.1-schnell'}, 'url': 'https://huggingface.co/black-forest-labs/FLUX.1-schnell', 'raw': 'https://huggingface.co/black-forest-labs/FLUX.1-schnell'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Proper airline liveries', 'raw': '• Proper airline liveries'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Contextual aviation scenes', 'raw': '• Contextual aviation scenes'}]","✈️ Aircraft Dataset & Generation Model https://huggingface.co/datasets/nyuuzyou/aircraft-images & https://huggingface.co/nyuuzyou/AircraftFLUX-LoRA + +Dataset Features: +• 165,340 high-res aircraft images with metadata +• Machine-generated English captions +• Detailed aircraft specs, registration & flight info +• Environmental context descriptions + +LoRA model specializes in: +• Realistic aircraft generation +• Accurate technical details for unpopular airplanes compared to https://huggingface.co/black-forest-labs/FLUX.1-schnell +• Proper airline liveries +• Contextual aviation scenes",[],[],"[{'reaction': '👍', 'users': ['John6666', 'victor', 'clem', 's3nh'], 'count': 4}]",2024-12-09 10:48:50,2024-12-09 10:48:50.788,[],/posts/nyuuzyou/848054673912163,1525,"{'language': 'en', 'probability': 0.6926946043968201}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,933660803963091,"[{'type': 'text', 'value': '# 🎨 FLUX LLAMA: Turn Your PC into a Design Studio', 'raw': '# 🎨 FLUX LLAMA: Turn Your PC into a Design Studio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Hello! Today, we're introducing FLUX LLAMA, an innovative AI image generation tool that ranked 2nd in HuggingFace's weekly downloads. Now you can create professional-grade images with clear text right from your PC, without the need for high-performance servers! 😊"", 'raw': ""Hello! Today, we're introducing FLUX LLAMA, an innovative AI image generation tool that ranked 2nd in HuggingFace's weekly downloads. Now you can create professional-grade images with clear text right from your PC, without the need for high-performance servers! 
😊""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## ✨ What It Can Do', 'raw': '## ✨ What It Can Do'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🔍 **Crystal Clear Text**: Type ""Welcome"" and see it appear crystal clear in your image', 'raw': '- 🔍 **Crystal Clear Text**: Type ""Welcome"" and see it appear crystal clear in your image'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🖥️ **Local Processing**: Run it on your PC with just an RTX 3060 (8x lighter with 4-bit quantization)', 'raw': '- 🖥️ **Local Processing**: Run it on your PC with just an RTX 3060 (8x lighter with 4-bit quantization)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ⚡ **Quick Generation**: Create professional marketing images in 5 minutes', 'raw': '- ⚡ **Quick Generation**: Create professional marketing images in 5 minutes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🌏 **Multilingual Support**: Perfect results in any language', 'raw': '- 🌏 **Multilingual Support**: Perfect results in any language'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🎯 **Real-time Editing**: Instant image modifications and regeneration', 'raw': '- 🎯 **Real-time Editing**: Instant image modifications and regeneration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🛠 Core Technology', 'raw': '## 🛠 Core Technology'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Double Stream + Single Stream architecture for perfect text processing', 'raw': '- Double Stream + Single Stream architecture for perfect text processing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Powerful embedding combination of T5-XXL and CLIP', 'raw': '- Powerful embedding combination of T5-XXL and CLIP'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 4-bit quantization optimization (3GB → 375MB)', 'raw': '- 4-bit quantization optimization (3GB → 375MB)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Fast processing with local GPU acceleration', 'raw': '- Fast processing with local GPU acceleration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Automatic language translation pipeline', 'raw': '- Automatic language translation pipeline'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 💡 Use Cases', 'raw': '## 💡 Use Cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SNS marketing image creation', 'raw': '- SNS marketing image creation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Product promotion banner generation', 'raw': '- Product promotion banner generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Event poster design', 'raw': '- Event poster design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Social media content creation', 'raw': '- Social media content creation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Product description image generation', 'raw': '- Product description image generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'No more hiring designers or learning complex design tools! Simply input what you want, and AI will create professional-grade results.', 'raw': 'No more hiring designers or learning complex design tools! 
Simply input what you want, and AI will create professional-grade results.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Easy to start, professional results - that's the magic of FLUX LLAMA! 🌟"", 'raw': ""Easy to start, professional results - that's the magic of FLUX LLAMA! 🌟""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Start creating now! Share your experience with us 😊', 'raw': 'Start creating now! Share your experience with us 😊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#FLUXLLAMA #AIImageGeneration #MarketingTools #DesignAI #HuggingFace', 'raw': '#FLUXLLAMA #AIImageGeneration #MarketingTools #DesignAI #HuggingFace'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PS: FLUX LLAMA is an innovative AI image generation tool developed by GiniPick, optimized especially for creating images with text. Plus, it boasts a lightweight model that runs on standard PCs!', 'raw': 'PS: FLUX LLAMA is an innovative AI image generation tool developed by GiniPick, optimized especially for creating images with text. Plus, it boasts a lightweight model that runs on standard PCs!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginipick/FLUXllama'}, 'url': 'https://huggingface.co/spaces/ginipick/FLUXllama', 'raw': 'https://huggingface.co/spaces/ginipick/FLUXllama'}, {'type': 'new_line', 'raw': '\n'}]","# 🎨 FLUX LLAMA: Turn Your PC into a Design Studio + +Hello! Today, we're introducing FLUX LLAMA, an innovative AI image generation tool that ranked 2nd in HuggingFace's weekly downloads. Now you can create professional-grade images with clear text right from your PC, without the need for high-performance servers! 😊 + +## ✨ What It Can Do +- 🔍 **Crystal Clear Text**: Type ""Welcome"" and see it appear crystal clear in your image +- 🖥️ **Local Processing**: Run it on your PC with just an RTX 3060 (8x lighter with 4-bit quantization) +- ⚡ **Quick Generation**: Create professional marketing images in 5 minutes +- 🌏 **Multilingual Support**: Perfect results in any language +- 🎯 **Real-time Editing**: Instant image modifications and regeneration + +## 🛠 Core Technology +- Double Stream + Single Stream architecture for perfect text processing +- Powerful embedding combination of T5-XXL and CLIP +- 4-bit quantization optimization (3GB → 375MB) +- Fast processing with local GPU acceleration +- Automatic language translation pipeline + +## 💡 Use Cases +- SNS marketing image creation +- Product promotion banner generation +- Event poster design +- Social media content creation +- Product description image generation + +No more hiring designers or learning complex design tools! Simply input what you want, and AI will create professional-grade results. + +Easy to start, professional results - that's the magic of FLUX LLAMA! 🌟 + +Start creating now! Share your experience with us 😊 + +#FLUXLLAMA #AIImageGeneration #MarketingTools #DesignAI #HuggingFace + +PS: FLUX LLAMA is an innovative AI image generation tool developed by GiniPick, optimized especially for creating images with text. Plus, it boasts a lightweight model that runs on standard PCs! 
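For readers curious how the 4-bit quantization mentioned above works in practice, here is a minimal sketch using diffusers with bitsandbytes NF4 weights. It illustrates the general technique on the base FLUX model only; it is an assumption-laden sketch, not the Space's actual implementation.

```python
# Minimal 4-bit (NF4) loading sketch for a FLUX pipeline. General technique
# only, assuming a recent diffusers + bitsandbytes install.
import torch
from diffusers import BitsAndBytesConfig, FluxPipeline, FluxTransformer2DModel

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,
)

# Quantize the transformer, the main memory consumer, to 4 bits.
transformer = FluxTransformer2DModel.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    subfolder="transformer",
    quantization_config=bnb_config,
    torch_dtype=torch.bfloat16,
)

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    transformer=transformer,
    torch_dtype=torch.bfloat16,
)
pipe.enable_model_cpu_offload()  # helps fit consumer GPUs such as an RTX 3060

image = pipe('a storefront banner that reads "Welcome"', num_inference_steps=28).images[0]
image.save("welcome.png")
```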
+ +https://huggingface.co/spaces/ginipick/FLUXllama +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/DnS7HbTe9bEDyKzoZpCzW.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/B_a_bjIuWGj8it5e7KkoO.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/BIem4GUr7cn1301Da3J6R.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/tRJbrHEooAExIPM6sQIyB.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/LnP8hcEMt-_6xgmFLlxq_.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/yYU-feR8_7pmxTZu6vgvN.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/41285xWPmsQ7hHCE1ZW78.png'}]",[],"[{'reaction': '🔥', 'users': ['seawolf2357', 'aiqtech', 'ginipick', 'dibyajitquilt', 'yaop', 'clem'], 'count': 6}, {'reaction': '🚀', 'users': ['seawolf2357', 'aiqtech', 'ginipick', 'Felladrin'], 'count': 4}, {'reaction': '👀', 'users': ['seawolf2357', 'aiqtech', 'ginipick', 'John6666'], 'count': 4}, {'reaction': '😎', 'users': ['seawolf2357', 'ginipick', 'aiqtech'], 'count': 3}, {'reaction': '🤯', 'users': ['seawolf2357', 'ginipick', 'aiqtech'], 'count': 3}, {'reaction': '❤️', 'users': ['seawolf2357', 'aiqtech'], 'count': 2}, {'reaction': '🧠', 'users': ['seawolf2357', 'aiqtech'], 'count': 2}, {'reaction': '➕', 'users': ['seawolf2357', 'aiqtech'], 'count': 2}, {'reaction': '🤗', 'users': ['seawolf2357', 'aiqtech'], 'count': 2}, {'reaction': '🤝', 'users': ['seawolf2357', 'aiqtech'], 'count': 2}, {'reaction': '👍', 'users': ['seawolf2357', 'ginipick'], 'count': 2}, {'reaction': '😔', 'users': ['seawolf2357'], 'count': 1}]",2024-12-09 10:27:08,2024-12-16 10:45:52.063,"[{'_id': '63c3550d8cc87cf0c06838e7', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/H5ncB4vaBtP8GVCidgxL0.png', 'fullname': 'seawolf', 'name': 'seawolf2357', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 242, 'isFollowing': False}, {'_id': '665c3bd6d94d2b106798a4c8', 'avatarUrl': '/avatars/26c0df71fdc2227a3b760b2f3827dd8c.svg', 'fullname': 'Sander Schildermans', 'name': 'saschi09', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65acd60b57f263e3d0ff0647', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png', 'fullname': 'ginipick', 'name': 'ginipick', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 637, 'isFollowing': False}]",/posts/ginipick/933660803963091,3426,"{'language': 'en', 'probability': 0.8025516271591187}",3 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg,774.0,Adina Yakefu,AdinaY,805106501319071,"[{'type': 'text', 'value': 'Updates from the Chinese community last week 🔥', 'raw': 'Updates from the Chinese community last week 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLM: ', 'raw': 'LLM: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Sailor 2 , multilingual model supporting 10+ South Asian languages by Sea AI Lab. 
', 'raw': '✨ Sailor 2 , multilingual model supporting 10+ South Asian languages by Sea AI Lab. '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'sailor2'}, 'url': 'https://huggingface.co/sailor2', 'raw': 'https://huggingface.co/sailor2', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/612ee6a7b960e78c6d2319d4/lPZCvi9En_2_mFJvqjvdo.jpeg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MLLM:', 'raw': 'MLLM:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨InternVL 2.5 , new open multimodal LLM by OpenGVLab', 'raw': '✨InternVL 2.5 , new open multimodal LLM by OpenGVLab'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/collections/OpenGVLab/internvl-25-673e1019b66e2218f68d7c1c', 'raw': 'https://huggingface.co/collections/OpenGVLab/internvl-25-673e1019b66e2218f68d7c1c'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Qwen2-VL 2B/7B/72B base model, the latest iteration of our Qwen-VL model by Alibaba Qwen', 'raw': '✨Qwen2-VL 2B/7B/72B base model, the latest iteration of our Qwen-VL model by Alibaba Qwen'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Qwen/qwen2-vl-66cee7455501d7126940800d'}, 'url': 'https://huggingface.co/collections/Qwen/qwen2-vl-66cee7455501d7126940800d', 'raw': 'https://huggingface.co/collections/Qwen/qwen2-vl-66cee7455501d7126940800d'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Video model:', 'raw': 'Video model:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨HunyuanVideo , 13B open video model by Tencent', 'raw': '✨HunyuanVideo , 13B open video model by Tencent'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'tencent/HunyuanVideo'}, 'url': 'https://huggingface.co/tencent/HunyuanVideo', 'raw': 'https://huggingface.co/tencent/HunyuanVideo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Reasoning model: ', 'raw': 'Reasoning model: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ LLaMA-O1 🦙 base & supervised model; pretrain & finetune datasets and demo all released', 'raw': '✨ LLaMA-O1 🦙 base & supervised model; pretrain & finetune datasets and demo all released'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'zh-ai-community/reasoning-models-67409fb3aa1ed78f10087cd7'}, 'url': 'https://huggingface.co/collections/zh-ai-community/reasoning-models-67409fb3aa1ed78f10087cd7', 'raw': 'https://huggingface.co/collections/zh-ai-community/reasoning-models-67409fb3aa1ed78f10087cd7'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Audio model:', 'raw': 'Audio model:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨Fish Speech 1.5, Text-to-speech in 13 languages, trained on 1M+ hours of audio by FishAudio', 'raw': '✨Fish Speech 1.5, Text-to-speech in 13 languages, trained on 1M+ hours of audio by FishAudio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'fishaudio/fish-speech-1.5'}, 
'url': 'https://huggingface.co/fishaudio/fish-speech-1.5', 'raw': 'https://huggingface.co/fishaudio/fish-speech-1.5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ClearVoice, An advanced voice processing framework by Alibaba Tongyi SpeechAI ', 'raw': '✨ClearVoice, An advanced voice processing framework by Alibaba Tongyi SpeechAI '}, {'type': 'resource', 'resource': {'type': 'user', 'id': 'alibabasglab'}, 'url': 'https://huggingface.co/alibabasglab', 'raw': 'https://huggingface.co/alibabasglab'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More details 👉 ', 'raw': 'More details 👉 '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'zh-ai-community'}, 'url': 'https://huggingface.co/zh-ai-community', 'raw': 'https://huggingface.co/zh-ai-community', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/Prk2cN2W9azl2SHoxMYVg.png'}, {'type': 'new_line', 'raw': '\n'}]","Updates from the Chinese community last week 🔥 + +LLM: +✨ Sailor 2 , multilingual model supporting 10+ South Asian languages by Sea AI Lab. https://huggingface.co/sailor2 + +MLLM: +✨InternVL 2.5 , new open multimodal LLM by OpenGVLab +https://huggingface.co/collections/OpenGVLab/internvl-25-673e1019b66e2218f68d7c1c +✨Qwen2-VL 2B/7B/72B base model, the latest iteration of our Qwen-VL model by Alibaba Qwen +https://huggingface.co/collections/Qwen/qwen2-vl-66cee7455501d7126940800d + +Video model: +✨HunyuanVideo , 13B open video model by Tencent + https://huggingface.co/tencent/HunyuanVideo + +Reasoning model: +✨ LLaMA-O1 🦙 base & supervised model; pretrain & finetune datasets and demo all released +https://huggingface.co/collections/zh-ai-community/reasoning-models-67409fb3aa1ed78f10087cd7 + +Audio model: +✨Fish Speech 1.5, Text-to-speech in 13 languages, trained on 1M+ hours of audio by FishAudio + https://huggingface.co/fishaudio/fish-speech-1.5 +✨ClearVoice, An advanced voice processing framework by Alibaba Tongyi SpeechAI https://huggingface.co/alibabasglab + +More details 👉 https://huggingface.co/zh-ai-community +",[],[],"[{'reaction': '🤗', 'users': ['John6666', 'clem', 'alielfilali01'], 'count': 3}, {'reaction': '👍', 'users': ['ijohn07'], 'count': 1}]",2024-12-09 09:30:22,2024-12-09 15:24:53.916,[],/posts/AdinaY/805106501319071,896,"{'language': 'en', 'probability': 0.6893172264099121}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1669551186189-63732ebbbd81fae2b3aaf3fb.jpeg,296.0,Knut Jägersberg,KnutJaegersberg,843029889602253,"[{'type': 'text', 'value': 'Practical Consciousness Theory for AI System Design', 'raw': 'Practical Consciousness Theory for AI System Design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Wrote a blog post about practical consciousness theory', 'raw': 'Wrote a blog post about practical consciousness theory'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/KnutJaegersberg/practical-consciousness-theory', 'raw': 'https://huggingface.co/blog/KnutJaegersberg/practical-consciousness-theory'}]","Practical Consciousness Theory for AI System Design + +Wrote a blog post about practical 
consciousness theory + +https://huggingface.co/blog/KnutJaegersberg/practical-consciousness-theory","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63732ebbbd81fae2b3aaf3fb/ijvCHYOleLBvKSWNLlfId.png'}]",[],"[{'reaction': '🔥', 'users': ['luckystarufo', 'John6666', 'AtAndDev', 'Maden4ik'], 'count': 4}]",2024-12-05 16:28:27,2024-12-05 16:28:27.752,[],/posts/KnutJaegersberg/843029889602253,1272,"{'language': 'en', 'probability': 0.5743385553359985}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,182392261883834,"[{'type': 'text', 'value': 'This is amazing for cheap model fine-tunes without the hassle of actual deployment! TIL: LoRA fine-tunes for models on the Hub can directly be used for inference! ', 'raw': 'This is amazing for cheap model fine-tunes without the hassle of actual deployment! TIL: LoRA fine-tunes for models on the Hub can directly be used for inference! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","This is amazing for cheap model fine-tunes without the hassle of actual deployment! TIL: LoRA fine-tunes for models on the Hub can directly be used for inference! + + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/634ff41ff32062e9eb7b06a3/9AiUt3zQvTPzjLEJI6Px0.png'}]",[],"[{'reaction': '👍', 'users': ['prithivMLmods', 'John6666', 'AtAndDev'], 'count': 3}]",2024-12-05 14:56:32,2024-12-05 14:56:32.429,[],/posts/davidberenstein1957/182392261883834,1194,"{'language': 'en', 'probability': 0.8102612495422363}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63c09b32dd793d5a62895a95/SFdGQeiZpD5oxkl66wK2u.jpeg,48.0,Duskfall Crew,Duskfallcrew,162083606376506,"[{'type': 'text', 'value': 'So this thing about quotas?', 'raw': 'So this thing about quotas?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I checked.', 'raw': 'I checked.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""SOMEHOW i'm uhh"", 'raw': ""SOMEHOW i'm uhh""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Not even close on main, probably over on my org but what can you do?', 'raw': 'Not even close on main, probably over on my org but what can you do?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You never get unlimited. ', 'raw': 'You never get unlimited. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Never have and never will :D', 'raw': 'Never have and never will :D'}]","So this thing about quotas? +I checked. +SOMEHOW i'm uhh +Not even close on main, probably over on my org but what can you do? +You never get unlimited. 
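To make the LoRA tip in davidberenstein1957's post above concrete, here is a minimal sketch using `huggingface_hub`'s `InferenceClient`. The adapter id `user/my-flux-lora` is a hypothetical placeholder, not a real repo; substitute any LoRA fine-tune you have pushed to the Hub.

```python
# Minimal sketch of querying a Hub-hosted LoRA fine-tune through the
# serverless Inference API. "user/my-flux-lora" is a hypothetical adapter
# repo id; the base model is resolved from the adapter's metadata.
from huggingface_hub import InferenceClient

client = InferenceClient()  # picks up your HF token from the local login
image = client.text_to_image(
    "a watercolor fox in a snowy forest",
    model="user/my-flux-lora",  # hypothetical LoRA repo on the Hub
)
image.save("fox.png")
```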
+ +Never have and never will :D",[],[],"[{'reaction': '😎', 'users': ['John6666', 'buawon0vcx'], 'count': 2}]",2024-12-05 10:49:23,2024-12-13 09:03:14.736,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '643ac5d2e2b979ae6144d68c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png', 'fullname': 'nyuuzyou', 'name': 'nyuuzyou', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 244, 'isFollowing': False}, {'_id': '61b85ce86eb1f2c5e6233736', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1655385361868-61b85ce86eb1f2c5e6233736.jpeg', 'fullname': 'Vaibhav Srivastav', 'name': 'reach-vb', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 930, 'isFollowing': False}, {'_id': '63c09b32dd793d5a62895a95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63c09b32dd793d5a62895a95/SFdGQeiZpD5oxkl66wK2u.jpeg', 'fullname': 'Duskfall Crew', 'name': 'Duskfallcrew', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '652bb5c9f60799e9a45ae17b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/652bb5c9f60799e9a45ae17b/0-5z31kHLK2wJHlUZrGml.jpeg', 'fullname': 'Fashion Stash', 'name': 'FashionStash', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12, 'isFollowing': False}, {'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}]",/posts/Duskfallcrew/162083606376506,1353,"{'language': 'en', 'probability': 0.9091168642044067}",14 +https://cdn-avatars.huggingface.co/v1/production/uploads/6340651b388c3fa40f9a5bc0/av1C4_S7bHGxAzOu8lOmG.jpeg,427.0,Adam Molnar,lunarflu,338353234182152,"[{'type': 'text', 'value': 'great blogpost! 🔥@wolfram ', 'raw': 'great blogpost! 🔥@wolfram '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/wolfram/llm-comparison-test-2024-12-04', 'raw': 'https://huggingface.co/blog/wolfram/llm-comparison-test-2024-12-04'}]","great blogpost! 
🔥@wolfram +https://huggingface.co/blog/wolfram/llm-comparison-test-2024-12-04",[],[],"[{'reaction': '🔥', 'users': ['John6666', 'victor', 'fakezeta', 'Davidchiu'], 'count': 4}, {'reaction': '👍', 'users': ['ljupco'], 'count': 1}]",2024-12-05 09:44:55,2024-12-05 09:45:10.254,[],/posts/lunarflu/338353234182152,2344,"{'language': 'en', 'probability': 0.4590398371219635}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/Plq9HMxfc8JR9WaNf3hBH.png,18.0,PZ,philipp-zettl,937132066182482,"[{'type': 'code_fence', 'lang': 'bash', 'code': ""alias rm='rm -i'"", 'raw': ""```bash\nalias rm='rm -i'\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Better be safe than sorry.', 'raw': 'Better be safe than sorry.'}]","```bash +alias rm='rm -i' +``` + +Better be safe than sorry.",[],[],[],2024-12-05 09:16:52,2024-12-05 09:16:52.808,[],/posts/philipp-zettl/937132066182482,789,"{'language': 'en', 'probability': 0.880470335483551}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/w3Z6xyKVBA6np65Tb16dP.jpeg,68.0,Simon Pagezy,pagezyhf,128953855489993,"[{'type': 'text', 'value': 'Today you are able to access some of the most famous models from the Hugging Face community in Amazon Bedrock 🤯', 'raw': 'Today you are able to access some of the most famous models from the Hugging Face community in Amazon Bedrock 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Amazon Bedrock expands its model catalog with Bedrock Marketplace to hundreds of specialized models.', 'raw': 'Amazon Bedrock expands its model catalog with Bedrock Marketplace to hundreds of specialized models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/model-catalog', 'raw': 'https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/model-catalog'}]","Today you are able to access some of the most famous models from the Hugging Face community in Amazon Bedrock 🤯 + +Amazon Bedrock expands its model catalog with Bedrock Marketplace to hundreds of specialized models. + +https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/model-catalog",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-12-05 09:03:26,2024-12-05 09:03:26.463,[],/posts/pagezyhf/128953855489993,379,"{'language': 'en', 'probability': 0.8952831029891968}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,226307738663563,"[{'type': 'text', 'value': 'Exciting Research Alert: Revolutionizing Recommendation Systems with PSL (Pairwise Softmax Loss)!', 'raw': 'Exciting Research Alert: Revolutionizing Recommendation Systems with PSL (Pairwise Softmax Loss)!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I just read a fascinating paper that introduces PSL - a groundbreaking approach to improve recommendation systems. Here's why this matters:"", 'raw': ""I just read a fascinating paper that introduces PSL - a groundbreaking approach to improve recommendation systems. 
Here's why this matters:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Key Innovations', 'raw': '>> Key Innovations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Core Concept: PSL reimagines the traditional Softmax Loss by viewing it through a pairwise perspective, addressing two critical limitations of current systems:', 'raw': 'Core Concept: PSL reimagines the traditional Softmax Loss by viewing it through a pairwise perspective, addressing two critical limitations of current systems:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The loose connection between Softmax Loss and ranking metrics like DCG', 'raw': '- The loose connection between Softmax Loss and ranking metrics like DCG'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- High sensitivity to false negative instances', 'raw': '- High sensitivity to false negative instances'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Technical Implementation:', 'raw': 'Technical Implementation:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Replaces exponential functions with alternative activation functions (Tanh, Atan, ReLU)', 'raw': '- Replaces exponential functions with alternative activation functions (Tanh, Atan, ReLU)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Reformulates loss calculation from a pairwise perspective', 'raw': '- Reformulates loss calculation from a pairwise perspective'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Integrates Distributionally Robust Optimization (DRO) principles', 'raw': '- Integrates Distributionally Robust Optimization (DRO) principles'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Real-World Impact', 'raw': '>> Real-World Impact'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enhanced Performance:', 'raw': 'Enhanced Performance:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Tighter surrogate for ranking metrics', 'raw': '- Tighter surrogate for ranking metrics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Better balance in data contribution weights', 'raw': '- Better balance in data contribution weights'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Improved robustness against false negatives', 'raw': '- Improved robustness against false negatives'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Superior handling of out-of-distribution scenarios', 'raw': '- Superior handling of out-of-distribution scenarios'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Practical Applications:', 'raw': 'Practical Applications:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- E-commerce recommendations', 'raw': '- E-commerce recommendations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Content discovery systems', 'raw': '- Content discovery systems'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Personalized service platforms', 'raw': '- Personalized service platforms'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Implementation Benefits', 'raw': '>> Implementation Benefits'}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The beauty of PSL lies in its simplicity - it requires minimal code modifications while delivering significant improvements in:', 'raw': 'The beauty of PSL lies in its simplicity - it requires minimal code modifications while delivering significant improvements in:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Recommendation accuracy', 'raw': '- Recommendation accuracy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- System robustness', 'raw': '- System robustness'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Training stability', 'raw': '- Training stability'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Distribution shift handling', 'raw': '- Distribution shift handling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This research opens new possibilities for building more reliable and accurate recommendation systems. The code is available on GitHub for those interested in implementation.', 'raw': 'This research opens new possibilities for building more reliable and accurate recommendation systems. The code is available on GitHub for those interested in implementation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What are your thoughts on this approach? Have you encountered similar challenges in recommendation systems?', 'raw': 'What are your thoughts on this approach? Have you encountered similar challenges in recommendation systems?'}]","Exciting Research Alert: Revolutionizing Recommendation Systems with PSL (Pairwise Softmax Loss)! + +I just read a fascinating paper that introduces PSL - a groundbreaking approach to improve recommendation systems. Here's why this matters: + +>> Key Innovations + +Core Concept: PSL reimagines the traditional Softmax Loss by viewing it through a pairwise perspective, addressing two critical limitations of current systems: +- The loose connection between Softmax Loss and ranking metrics like DCG +- High sensitivity to false negative instances + +Technical Implementation: +- Replaces exponential functions with alternative activation functions (Tanh, Atan, ReLU) +- Reformulates loss calculation from a pairwise perspective +- Integrates Distributionally Robust Optimization (DRO) principles + +>> Real-World Impact + +Enhanced Performance: +- Tighter surrogate for ranking metrics +- Better balance in data contribution weights +- Improved robustness against false negatives +- Superior handling of out-of-distribution scenarios + +Practical Applications: +- E-commerce recommendations +- Content discovery systems +- Personalized service platforms + +>> Implementation Benefits + +The beauty of PSL lies in its simplicity - it requires minimal code modifications while delivering significant improvements in: +- Recommendation accuracy +- System robustness +- Training stability +- Distribution shift handling + +This research opens new possibilities for building more reliable and accurate recommendation systems. The code is available on GitHub for those interested in implementation. + +What are your thoughts on this approach? 
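To ground the recipe from the PSL post above, here is a rough PyTorch sketch of a sampled softmax loss rewritten over pairwise score gaps, with the exponential swappable for bounded activations. This is an illustrative reading of the idea, not the paper's exact loss; `tau` and the negative-sampling scheme are assumed knobs.

```python
# Rough PyTorch sketch of a pairwise softmax loss with swappable activations.
# Illustrative only: the exact PSL formulation is in the paper.
import torch
import torch.nn.functional as F

def pairwise_softmax_loss(pos_scores, neg_scores, act="tanh", tau=1.0):
    # pos_scores: (B,) score of each user's positive item
    # neg_scores: (B, N) scores of N sampled negatives per user
    d = (neg_scores - pos_scores.unsqueeze(1)) / tau  # pairwise gaps s_neg - s_pos
    if act == "exp":        # recovers the standard sampled softmax loss
        g = d
    elif act == "tanh":     # bounded, so false negatives contribute less
        g = torch.tanh(d)
    elif act == "atan":
        g = torch.atan(d)
    else:                   # "relu": penalizes only negatives outranking the positive
        g = F.relu(d)
    # log(1 + sum_j exp(g_ij)) keeps the log-sum-exp structure of softmax loss
    return torch.log1p(torch.exp(g).sum(dim=1)).mean()
```

With `act="exp"` this reduces to log(1 + Σ exp((s_neg - s_pos)/tau)), i.e. the usual sampled softmax loss, which is what makes the bounded variants drop-in replacements.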
Have you encountered similar challenges in recommendation systems?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/mNiV32Yz7UjPs43DGVJdc.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-12-05 07:44:32,2024-12-05 07:44:32.678,[],/posts/singhsidhukuldeep/226307738663563,387,"{'language': 'en', 'probability': 0.8029402494430542}",0 +/avatars/99bed01e9e9eba6d9300648a39048ba0.svg,13.0,jmd,h4,776692091783782,"[{'type': 'resource', 'resource': {'type': 'model', 'id': 'black-forest-labs/FLUX.1-schnell', 'discussionNum': 136}, 'url': 'https://huggingface.co/black-forest-labs/FLUX.1-schnell/discussions/136', 'raw': 'https://huggingface.co/black-forest-labs/FLUX.1-schnell/discussions/136'}, {'type': 'text', 'value': ' I found a meta language via a feedback loop of flux with gemini and chatgpt, try it out! ""GOON\'T"" on FLUX', 'raw': ' I found a meta language via a feedback loop of flux with gemini and chatgpt, try it out! ""GOON\'T"" on FLUX'}]","https://huggingface.co/black-forest-labs/FLUX.1-schnell/discussions/136 I found a meta language via a feedback loop of flux with gemini and chatgpt, try it out! ""GOON'T"" on FLUX","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63cb84c1567105596c3fa8a2/O0GMRAAMpwfJnPBF-rb4q.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63cb84c1567105596c3fa8a2/7ElaBI9Ia9Ug4_STJHOSs.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63cb84c1567105596c3fa8a2/VTNSZ2GdzGwpiTs6DF_mN.png'}]",[],"[{'reaction': '🧠', 'users': ['John6666', 'takarajordan', 'estebarb', 'gauthamk28', 'AtAndDev'], 'count': 5}]",2024-12-05 00:42:15,2024-12-05 00:42:15.777,[],/posts/h4/776692091783782,1497,"{'language': 'en', 'probability': 0.7304969429969788}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,302035312925507,"[{'type': 'text', 'value': 'uhh..', 'raw': 'uhh..'}]",uhh..,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/659f000b83abded48e190901/rI6iBQthU94QrdWjjy6mu.png'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'takarajordan', 'lunarflu', 'beberik', 'nicoboss', 'Duskfallcrew', 'p3nGu1nZz'], 'count': 7}]",2024-12-05 00:10:40,2024-12-08 01:12:46.495,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '643ac5d2e2b979ae6144d68c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png', 'fullname': 'nyuuzyou', 'name': 'nyuuzyou', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 244, 'isFollowing': False}, {'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}, {'_id': '6340651b388c3fa40f9a5bc0', 'avatarUrl': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/6340651b388c3fa40f9a5bc0/av1C4_S7bHGxAzOu8lOmG.jpeg', 'fullname': 'Adam Molnar', 'name': 'lunarflu', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 427, 'isFollowing': False}, {'_id': '62d22496c58f969c152bcefd', 'avatarUrl': '/avatars/76c3b70e312f25e1e610473475553c5c.svg', 'fullname': 'Tiezhen WANG', 'name': 'xianbao', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 119, 'isFollowing': False}]",/posts/nroggendorff/302035312925507,1964,"{'language': 'de', 'probability': 0.28949931263923645}",8 +https://cdn-avatars.huggingface.co/v1/production/uploads/65acd60b57f263e3d0ff0647/HENR9sR3CDchSDldrYOdS.png,637.0,ginipick,ginipick,466435498329387,"[{'type': 'text', 'value': '# 🎨 GiniGen Canvas: Create Dream Images with AI', 'raw': '# 🎨 GiniGen Canvas: Create Dream Images with AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hey there! Today I want to introduce GiniGen Canvas, an AI image tool I created. You can extract objects from images and create amazing backgrounds with AI, all without complicated Photoshop! 😊', 'raw': 'Hey there! Today I want to introduce GiniGen Canvas, an AI image tool I created. You can extract objects from images and create amazing backgrounds with AI, all without complicated Photoshop! 😊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## ✨ What You Can Do', 'raw': '## ✨ What You Can Do'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🔍 **Object Extraction**: Just say ""extract the dog"" and it\'ll find and cleanly cut it out (thanks to GroundingDINO model!)', 'raw': '- 🔍 **Object Extraction**: Just say ""extract the dog"" and it\'ll find and cleanly cut it out (thanks to GroundingDINO model!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🎨 **AI Background Generation**: Instantly create new backgrounds with simple descriptions like ""beach"" or ""space""', 'raw': '- 🎨 **AI Background Generation**: Instantly create new backgrounds with simple descriptions like ""beach"" or ""space""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 📐 **Flexible Ratios**: Set any ratio you want - 1:1, 16:9, 9:16, 4:3', 'raw': '- 📐 **Flexible Ratios**: Set any ratio you want - 1:1, 16:9, 9:16, 4:3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🎯 **Easy Position Control**: Place extracted objects anywhere, as easy as drag-and-drop', 'raw': '- 🎯 **Easy Position Control**: Place extracted objects anywhere, as easy as drag-and-drop'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🌏 **Multi-language Support**: Works with multiple languages through automatic translation', 'raw': '- 🌏 **Multi-language Support**: Works with multiple languages through automatic translation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 🛠 Tech Under the Hood', 'raw': '## 🛠 Tech Under the Hood'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Fast processing with CUDA acceleration', 'raw': '- Fast processing with CUDA acceleration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- High-quality image generation using Flux pipeline', 'raw': '- High-quality image generation using Flux pipeline'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- User-friendly UI built with Gradio', 'raw': '- User-friendly UI built with Gradio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Optimized memory management for stable performance', 'raw': '- Optimized memory management for stable performance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 💡 Use Cases', 'raw': '## 💡 Use Cases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Creating SNS posting images', 'raw': '- Creating SNS posting images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Product catalog design', 'raw': '- Product catalog design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Marketing content creation', 'raw': '- Marketing content creation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Personal project image editing', 'raw': '- Personal project image editing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""No need to be a Photoshop expert! Just tell the AI what you want through text commands, and it'll handle everything. "", 'raw': ""No need to be a Photoshop expert! Just tell the AI what you want through text commands, and it'll handle everything. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Easy to start, professional results - that's the magic of GiniGen Canvas! 🌟"", 'raw': ""Easy to start, professional results - that's the magic of GiniGen Canvas! 🌟""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'd love to hear your feedback after you give it a try! 😊"", 'raw': ""I'd love to hear your feedback after you give it a try! 😊""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #ImageEditing #MachineLearning #ComputerVision #HuggingFace', 'raw': '#AI #ImageEditing #MachineLearning #ComputerVision #HuggingFace'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PS: GiniGen Canvas is an AI Integrated Image Creator where you can extract objects, generate backgrounds, and adjust ratios and positions to create complete images with AI.', 'raw': 'PS: GiniGen Canvas is an AI Integrated Image Creator where you can extract objects, generate backgrounds, and adjust ratios and positions to create complete images with AI.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ginigen/Canvas'}, 'url': 'https://huggingface.co/spaces/ginigen/Canvas', 'raw': 'https://huggingface.co/spaces/ginigen/Canvas'}]","# 🎨 GiniGen Canvas: Create Dream Images with AI + +Hey there! Today I want to introduce GiniGen Canvas, an AI image tool I created. You can extract objects from images and create amazing backgrounds with AI, all without complicated Photoshop! 😊 + +## ✨ What You Can Do + +- 🔍 **Object Extraction**: Just say ""extract the dog"" and it'll find and cleanly cut it out (thanks to GroundingDINO model!) 
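For reference, the text-prompted extraction step mentioned above can be reproduced with the Grounding DINO checkpoints in `transformers`. The sketch below is an assumption about the approach, not the Space's actual code; `photo.jpg` is a placeholder input.

```python
# Sketch of text-prompted detection with Grounding DINO via transformers
# (an assumed stand-in for the Space's extraction step, not its real code).
import torch
from PIL import Image
from transformers import AutoProcessor, AutoModelForZeroShotObjectDetection

model_id = "IDEA-Research/grounding-dino-tiny"
processor = AutoProcessor.from_pretrained(model_id)
model = AutoModelForZeroShotObjectDetection.from_pretrained(model_id)

image = Image.open("photo.jpg")  # placeholder input image
inputs = processor(images=image, text="a dog.", return_tensors="pt")  # lowercase, period-terminated prompt
with torch.no_grad():
    outputs = model(**inputs)

results = processor.post_process_grounded_object_detection(
    outputs,
    inputs.input_ids,
    box_threshold=0.4,
    text_threshold=0.3,
    target_sizes=[image.size[::-1]],
)
print(results[0]["boxes"])  # boxes for the matched phrase, ready for cropping or masking
```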
+- 🎨 **AI Background Generation**: Instantly create new backgrounds with simple descriptions like ""beach"" or ""space"" +- 📐 **Flexible Ratios**: Set any ratio you want - 1:1, 16:9, 9:16, 4:3 +- 🎯 **Easy Position Control**: Place extracted objects anywhere, as easy as drag-and-drop +- 🌏 **Multi-language Support**: Works with multiple languages through automatic translation + +## 🛠 Tech Under the Hood + +- Fast processing with CUDA acceleration +- High-quality image generation using Flux pipeline +- User-friendly UI built with Gradio +- Optimized memory management for stable performance + +## 💡 Use Cases + +- Creating SNS posting images +- Product catalog design +- Marketing content creation +- Personal project image editing + +No need to be a Photoshop expert! Just tell the AI what you want through text commands, and it'll handle everything. + +Easy to start, professional results - that's the magic of GiniGen Canvas! 🌟 + +I'd love to hear your feedback after you give it a try! 😊 + +#AI #ImageEditing #MachineLearning #ComputerVision #HuggingFace + +PS: GiniGen Canvas is an AI Integrated Image Creator where you can extract objects, generate backgrounds, and adjust ratios and positions to create complete images with AI. + +https://huggingface.co/spaces/ginigen/Canvas","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/uYIbeKx304bHBksSW-vcb.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/4_-N9ZFtJmGru-95KO46_.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/4a7pempPUri2PwCm0MpfN.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/ocNBagDlZhZ-GyPI0kPmC.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/9Hk_k8zO-0yCfpOELRscb.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/QoumVQLkSnq4uph6VuEAe.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/9NBCCP7isHrzK4C9YUKqe.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/oXtICHcsjOXxIslkIGAiX.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/-ANBpeQoLHLcB2pkbYKnb.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65acd60b57f263e3d0ff0647/2FqwHCDTmILpfW3clH214.webp'}]",[],"[{'reaction': '❤️', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp', 'gunship999', 'kolaslab', 'aiqtech', 'fantaxy', 'AnonJaySF', 'SuperCS', 'carlizor', 'p3nGu1nZz'], 'count': 12}, {'reaction': '🔥', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp', 'gunship999', 'kolaslab', 'aiqtech', 'fantaxy'], 'count': 8}, {'reaction': '🚀', 'users': ['ginipick', 'openfree', 'aiqcamp', 'gunship999', 'kolaslab', 'aiqtech', 'fantaxy', 'p3nGu1nZz'], 'count': 8}, {'reaction': '👀', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp', 'aiqtech', 'fantaxy', 'John6666'], 'count': 7}, {'reaction': '🤗', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp', 'gunship999', 'kolaslab', 'aiqtech'], 'count': 7}, {'reaction': '😎', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp', 'gunship999', 'kolaslab', 'p3nGu1nZz'], 'count': 7}, {'reaction': '🧠', 'users': 
['ginipick', 'seawolf2357', 'openfree', 'aiqcamp', 'gunship999', 'kolaslab'], 'count': 6}, {'reaction': '➕', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp'], 'count': 4}, {'reaction': '🤯', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp'], 'count': 4}, {'reaction': '😔', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp'], 'count': 4}, {'reaction': '🤝', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp'], 'count': 4}, {'reaction': '👍', 'users': ['ginipick', 'seawolf2357', 'openfree', 'aiqcamp'], 'count': 4}]",2024-12-04 23:18:36,2024-12-27 13:22:09.853,[],/posts/ginipick/466435498329387,3985,"{'language': 'en', 'probability': 0.8538021445274353}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/648a374f00f7a3374ee64b99/YPwSOrronoozwHbJchPn3.jpeg,246.0,Caleb Fahlgren,cfahlgren1,846323182842776,"[{'type': 'text', 'value': 'We just dropped an LLM inside the SQL Console 🤯', 'raw': 'We just dropped an LLM inside the SQL Console 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The amazing, new ', 'raw': 'The amazing, new '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Qwen/Qwen2.5-Coder-32B-Instruct'}, 'url': 'https://huggingface.co/Qwen/Qwen2.5-Coder-32B-Instruct', 'raw': 'https://huggingface.co/Qwen/Qwen2.5-Coder-32B-Instruct'}, {'type': 'text', 'value': ' model can now write SQL for any Hugging Face dataset ✨', 'raw': ' model can now write SQL for any Hugging Face dataset ✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's 2025, you shouldn't be hand-writing SQL! This is a big step in making it so anyone can do in-depth analysis on a dataset. Let us know what you think 🤗"", 'raw': ""It's 2025, you shouldn't be hand-writing SQL! This is a big step in making it so anyone can do in-depth analysis on a dataset. Let us know what you think 🤗""}, {'type': 'new_line', 'raw': '\n'}]","We just dropped an LLM inside the SQL Console 🤯 + +The amazing, new https://huggingface.co/Qwen/Qwen2.5-Coder-32B-Instruct model can now write SQL for any Hugging Face dataset ✨ + +It's 2025, you shouldn't be hand-writing SQL! This is a big step in making it so anyone can do in-depth analysis on a dataset. 
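As a concrete illustration of the kind of query involved (the SQL Console runs on DuckDB), the generated SQL can also be executed locally against Hub datasets via DuckDB's `hf://` paths. The dataset repo and `language` column below are hypothetical placeholders.

```python
# Sketch: running SQL against a Hub dataset locally with DuckDB's hf:// paths.
# "username/my-dataset" and the "language" column are hypothetical placeholders.
import duckdb

duckdb.sql("""
    SELECT language, COUNT(*) AS n
    FROM 'hf://datasets/username/my-dataset/data/*.parquet'
    GROUP BY language
    ORDER BY n DESC
    LIMIT 10
""").show()
```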
Let us know what you think 🤗 +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/648a374f00f7a3374ee64b99/AefpK3oHwHFQD72bJ0W_-.mp4'}]",[],"[{'reaction': '🚀', 'users': ['severo', 'qubvel-hf', 'John6666', 'panckreous', 'plaguss', 'mkurman', 'asoria', 'BrigitteTousi', 'Chief-Inspector', 'akahana', 'Lyte', 'NeuralDev', 'jsulz', 'pcuenq', 'Nymbo', 'mrdbourke', 'berkdotai', 'julien-c', 'linoyts', 'daqc'], 'count': 20}, {'reaction': '🔥', 'users': ['asoria', 'BrigitteTousi', 'NeuralDev', 'jsulz', 'pcuenq', 'Nymbo', 'JoPmt', 'xlxlxlxl', 'julien-c'], 'count': 9}, {'reaction': '👍', 'users': ['ijohn07', 'NeuralDev', 'jsulz', 'pcuenq', 'Nymbo', 'julien-c'], 'count': 6}, {'reaction': '👀', 'users': ['Nymbo', 'julien-c'], 'count': 2}, {'reaction': '🤯', 'users': ['Nymbo'], 'count': 1}, {'reaction': '😎', 'users': ['Nymbo'], 'count': 1}]",2024-12-02 13:11:53,2024-12-02 13:20:38.690,[],/posts/cfahlgren1/846323182842776,3055,"{'language': 'en', 'probability': 0.8715261816978455}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg,1593.0,Clelia Astra Bertelli,as-cle-bert,869641743189800,"[{'type': 'text', 'value': 'Hi HuggingFacers!🤗', 'raw': 'Hi HuggingFacers!🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'December is here and time has come, for most of us, to wrap up our code projects and take stock of our 2024 contributions🗓️', 'raw': 'December is here and time has come, for most of us, to wrap up our code projects and take stock of our 2024 contributions🗓️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In order to do this, I made a small Gradio application, ', 'raw': 'In order to do this, I made a small Gradio application, '}, {'type': 'inline_code', 'code': 'what-a-git-year', 'raw': '`what-a-git-year`'}, {'type': 'text', 'value': ':', 'raw': ':'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'as-cle-bert/what-a-git-year'}, 'url': 'https://huggingface.co/spaces/as-cle-bert/what-a-git-year', 'raw': 'https://huggingface.co/spaces/as-cle-bert/what-a-git-year'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'that scrapes information from your GitHub profile and summarizes them, producing also nice plots📊', 'raw': 'that scrapes information from your GitHub profile and summarizes them, producing also nice plots📊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Find also the GitHub repo here: ', 'raw': 'Find also the GitHub repo here: '}, {'type': 'link', 'href': 'https://github.com/AstraBert/what-a-git-year', 'raw': 'https://github.com/AstraBert/what-a-git-year'}, {'type': 'text', 'value': ' ⭐', 'raw': ' ⭐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hope that everyone had a Git year!🎉', 'raw': 'Hope that everyone had a Git year!🎉'}, {'type': 'new_line', 'raw': '\n'}]","Hi HuggingFacers!🤗 +December is here and time has come, for most of us, to wrap up our code projects and take stock of our 2024 contributions🗓️ +In order to do this, I made a small Gradio application, `what-a-git-year`: + +https://huggingface.co/spaces/as-cle-bert/what-a-git-year + +that scrapes information from your GitHub profile and summarizes them, producing also nice plots📊 +Find also the GitHub repo here: https://github.com/AstraBert/what-a-git-year ⭐ + +Hope 
that everyone had a Git year!🎉 +",[],[],"[{'reaction': '🚀', 'users': ['creativekdev', 'John6666', 'linoyts', 'BrigitteTousi'], 'count': 4}]",2024-12-02 10:49:51,2024-12-02 10:49:51.385,[],/posts/as-cle-bert/869641743189800,1421,"{'language': 'en', 'probability': 0.9284446835517883}",0 +/avatars/c86de5d9de3b39757c72e2b5b79d1838.svg,62.0,99,cutechicken,644474511461576,"[{'type': 'text', 'value': '# Tank War: A Cool AI-Generated Game Making Waves on Hugging Face', 'raw': '# Tank War: A Cool AI-Generated Game Making Waves on Hugging Face'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Hey there! Let me tell you about Tank War, a super interesting HTML5 Canvas game that's currently ranked #11 on Hugging Face Trending. What's really cool about this game is that it was initially whipped up in just one minute using MOUSE-I ("", 'raw': ""Hey there! Let me tell you about Tank War, a super interesting HTML5 Canvas game that's currently ranked #11 on Hugging Face Trending. What's really cool about this game is that it was initially whipped up in just one minute using MOUSE-I (""}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'VIDraft/mouse1'}, 'url': 'https://huggingface.co/spaces/VIDraft/mouse1', 'raw': 'https://huggingface.co/spaces/VIDraft/mouse1'}, {'type': 'text', 'value': '), and then polished up with some human touches to the metadata files. Pretty neat, right?', 'raw': '), and then polished up with some human touches to the metadata files. Pretty neat, right?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## What Makes It Fun?', 'raw': '## What Makes It Fun?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Stage-by-Stage Action**: You get 2 stages, each packed with 10 rounds and an epic boss battle', 'raw': '- **Stage-by-Stage Action**: You get 2 stages, each packed with 10 rounds and an epic boss battle'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Power-Up Shopping**: Grab new tanks and upgrades with your hard-earned gold', 'raw': '- **Power-Up Shopping**: Grab new tanks and upgrades with your hard-earned gold'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Two-Gun System**: Switch between a heavy-hitting cannon and a rapid-fire machine gun', 'raw': '- **Two-Gun System**: Switch between a heavy-hitting cannon and a rapid-fire machine gun'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Air Support**: Call in BF-109 fighters and JU-87 dive bombers to rain down some extra firepower', 'raw': '- **Air Support**: Call in BF-109 fighters and JU-87 dive bombers to rain down some extra firepower'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## The Tech Behind the Magic', 'raw': '## The Tech Behind the Magic'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. **AI-Powered Foundation**', 'raw': '1. 
**AI-Powered Foundation**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Quick game logic generation through MOUSE-I', 'raw': ' - Quick game logic generation through MOUSE-I'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Fine-tuned with custom metadata tweaks', 'raw': ' - Fine-tuned with custom metadata tweaks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. **Smooth Canvas Graphics**', 'raw': '2. **Smooth Canvas Graphics**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Butter-smooth animations with requestAnimationFrame', 'raw': ' - Butter-smooth animations with requestAnimationFrame'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Smart hitbox system for precise combat', 'raw': ' - Smart hitbox system for precise combat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. **Smart Code Structure**', 'raw': '3. **Smart Code Structure**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Well-organized classes for enemies, effects, and support units', 'raw': ' - Well-organized classes for enemies, effects, and support units'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Clever code reuse through inheritance', 'raw': ' - Clever code reuse through inheritance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. **Cool Game Features**', 'raw': '4. **Cool Game Features**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Awesome sound effects and background music', 'raw': ' - Awesome sound effects and background music'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Smart enemy AI that keeps you on your toes', 'raw': ' - Smart enemy AI that keeps you on your toes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Support units that know how to pick their targets', 'raw': ' - Support units that know how to pick their targets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This project shows just how far we've come with AI-assisted game development, and its popularity on Hugging Face proves it's onto something good! It's a perfect example of how MOUSE-I's quick prototyping abilities and a developer's careful tweaking can create something really special. Think of it as AI and human creativity teaming up to make something awesome! 🎮✨"", 'raw': ""This project shows just how far we've come with AI-assisted game development, and its popularity on Hugging Face proves it's onto something good! It's a perfect example of how MOUSE-I's quick prototyping abilities and a developer's careful tweaking can create something really special. Think of it as AI and human creativity teaming up to make something awesome! 🎮✨""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'cutechicken/tankwar'}, 'url': 'https://huggingface.co/spaces/cutechicken/tankwar', 'raw': 'https://huggingface.co/spaces/cutechicken/tankwar'}]","# Tank War: A Cool AI-Generated Game Making Waves on Hugging Face + +Hey there! Let me tell you about Tank War, a super interesting HTML5 Canvas game that's currently ranked #11 on Hugging Face Trending. 
What's really cool about this game is that it was initially whipped up in just one minute using MOUSE-I (https://huggingface.co/spaces/VIDraft/mouse1), and then polished up with some human touches to the metadata files. Pretty neat, right? + +## What Makes It Fun? + +- **Stage-by-Stage Action**: You get 2 stages, each packed with 10 rounds and an epic boss battle +- **Power-Up Shopping**: Grab new tanks and upgrades with your hard-earned gold +- **Two-Gun System**: Switch between a heavy-hitting cannon and a rapid-fire machine gun +- **Air Support**: Call in BF-109 fighters and JU-87 dive bombers to rain down some extra firepower + +## The Tech Behind the Magic + +1. **AI-Powered Foundation** + - Quick game logic generation through MOUSE-I + - Fine-tuned with custom metadata tweaks + +2. **Smooth Canvas Graphics** + - Butter-smooth animations with requestAnimationFrame + - Smart hitbox system for precise combat + +3. **Smart Code Structure** + - Well-organized classes for enemies, effects, and support units + - Clever code reuse through inheritance + +4. **Cool Game Features** + - Awesome sound effects and background music + - Smart enemy AI that keeps you on your toes + - Support units that know how to pick their targets + +This project shows just how far we've come with AI-assisted game development, and its popularity on Hugging Face proves it's onto something good! It's a perfect example of how MOUSE-I's quick prototyping abilities and a developer's careful tweaking can create something really special. Think of it as AI and human creativity teaming up to make something awesome! 🎮✨ + +https://huggingface.co/spaces/cutechicken/tankwar","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6607a1cfc50f8393c5744b02/KSQwfkO0DcutFaQj0HHAT.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6607a1cfc50f8393c5744b02/UjfLGb1pV7bbVin73Ru0T.png'}]",[],"[{'reaction': '🔥', 'users': ['cutechicken', 'seawolf2357', 'openfree', 'ginipick', 'aiqcamp', 'fantaxy', 'fantos', 'kolaslab', 'creativekdev', 'linoyts', 'victor', 'mkurman', 'JoseVega', 'mlengineerhalo'], 'count': 14}, {'reaction': '➕', 'users': ['cutechicken', 'seawolf2357', 'openfree', 'ginipick', 'aiqcamp', 'fantos', 'kolaslab'], 'count': 7}, {'reaction': '👀', 'users': ['cutechicken', 'seawolf2357', 'openfree', 'ginipick', 'aiqcamp', 'John6666'], 'count': 6}, {'reaction': '❤️', 'users': ['cutechicken', 'seawolf2357', 'openfree', 'ginipick', 'edethan1998', 'ijohn07'], 'count': 6}, {'reaction': '🚀', 'users': ['cutechicken', 'seawolf2357', 'openfree', 'ginipick', 'fantaxy'], 'count': 5}, {'reaction': '🤗', 'users': ['cutechicken', 'seawolf2357', 'openfree', 'ginipick'], 'count': 4}, {'reaction': '😎', 'users': ['cutechicken', 'seawolf2357', 'openfree', 'aiqcamp'], 'count': 4}, {'reaction': '😔', 'users': ['cutechicken', 'openfree', 'aiqcamp', 'fantos'], 'count': 4}, {'reaction': '🧠', 'users': ['cutechicken', 'seawolf2357', 'openfree'], 'count': 3}, {'reaction': '👍', 'users': ['cutechicken', 'openfree', 'seawolf2357'], 'count': 3}, {'reaction': '🤝', 'users': ['cutechicken', 'fantos', 'seawolf2357'], 'count': 3}, {'reaction': '🤯', 'users': ['cutechicken', 'fantaxy'], 'count': 2}]",2024-12-02 10:37:29,2024-12-04 03:27:56.608,[],/posts/cutechicken/644474511461576,3301,"{'language': 'en', 'probability': 0.8785922527313232}",0 
+https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,478819051434743,"[{'type': 'text', 'value': 'Last week we were blessed with open-source models! A recap 💝', 'raw': 'Last week we were blessed with open-source models! A recap 💝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/nov-29-releases-674ccc255a57baf97b1e2d31'}, 'url': 'https://huggingface.co/collections/merve/nov-29-releases-674ccc255a57baf97b1e2d31', 'raw': 'https://huggingface.co/collections/merve/nov-29-releases-674ccc255a57baf97b1e2d31'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖼️ Multimodal', 'raw': '🖼️ Multimodal'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> At Hugging Face we released SmolVLM, a performant and efficient smol vision language model 💗', 'raw': '> At Hugging Face we released SmolVLM, a performant and efficient smol vision language model 💗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Show Lab released ShowUI-2B: new vision-language-action model to build GUI/web automation agents 🤖', 'raw': '> Show Lab released ShowUI-2B: new vision-language-action model to build GUI/web automation agents 🤖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Rhymes AI has released the base model of Aria: Aria-Base-64K and Aria-Base-8K with their respective context length', 'raw': '> Rhymes AI has released the base model of Aria: Aria-Base-64K and Aria-Base-8K with their respective context length'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> ViDoRe team released ColSmolVLM: A new ColPali-like retrieval model based on SmolVLM', 'raw': '> ViDoRe team released ColSmolVLM: A new ColPali-like retrieval model based on SmolVLM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Dataset: Llava-CoT-o1-Instruct: new dataset labelled using Llava-CoT multimodal reasoning model📖', 'raw': '> Dataset: Llava-CoT-o1-Instruct: new dataset labelled using Llava-CoT multimodal reasoning model📖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Dataset: LLaVA-CoT-100k dataset used to train Llava-CoT released by creators of Llava-CoT 📕', 'raw': '> Dataset: LLaVA-CoT-100k dataset used to train Llava-CoT released by creators of Llava-CoT 📕'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💬 LLMs', 'raw': '💬 LLMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Qwen team released QwQ-32B-Preview, state-of-the-art open-source reasoning model, broke the internet 🔥', 'raw': '> Qwen team released QwQ-32B-Preview, state-of-the-art open-source reasoning model, broke the internet 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> AliBaba has released Marco-o1, a new open-source reasoning model 💥', 'raw': '> AliBaba has released Marco-o1, a new open-source reasoning model 💥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> NVIDIA released Hymba 1.5B Base and Instruct, the new state-of-the-art SLMs with hybrid architecture (Mamba + transformer)', 'raw': '> NVIDIA released Hymba 1.5B Base and Instruct, the new state-of-the-art SLMs with hybrid architecture (Mamba + transformer)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⏯️ Image/Video Generation', 'raw': '⏯️ Image/Video 
Generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Qwen2VL-Flux: new image generation model based on Qwen2VL image encoder, T5 and Flux for generation', 'raw': '> Qwen2VL-Flux: new image generation model based on Qwen2VL image encoder, T5 and Flux for generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Lightricks released LTX-Video, a new DiT-based video generation model that can generate 24 FPS videos at 768x512 res ⏯️', 'raw': '> Lightricks released LTX-Video, a new DiT-based video generation model that can generate 24 FPS videos at 768x512 res ⏯️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Dataset: Image Preferences is a new image generation preference dataset made with DIBT community effort of Argilla 🏷️', 'raw': '> Dataset: Image Preferences is a new image generation preference dataset made with DIBT community effort of Argilla 🏷️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Audio', 'raw': 'Audio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> OuteAI released OuteTTS-0.2-500M new multilingual text-to-speech model based on Qwen-2.5-0.5B trained on 5B audio prompt tokens', 'raw': '> OuteAI released OuteTTS-0.2-500M new multilingual text-to-speech model based on Qwen-2.5-0.5B trained on 5B audio prompt tokens'}]","Last week we were blessed with open-source models! A recap 💝 +https://huggingface.co/collections/merve/nov-29-releases-674ccc255a57baf97b1e2d31 + +🖼️ Multimodal +> At Hugging Face we released SmolVLM, a performant and efficient smol vision language model 💗 +> Show Lab released ShowUI-2B: new vision-language-action model to build GUI/web automation agents 🤖 +> Rhymes AI has released the base model of Aria: Aria-Base-64K and Aria-Base-8K with their respective context length +> ViDoRe team released ColSmolVLM: A new ColPali-like retrieval model based on SmolVLM +> Dataset: Llava-CoT-o1-Instruct: new dataset labelled using Llava-CoT multimodal reasoning model📖 +> Dataset: LLaVA-CoT-100k dataset used to train Llava-CoT released by creators of Llava-CoT 📕 + +💬 LLMs +> Qwen team released QwQ-32B-Preview, state-of-the-art open-source reasoning model, broke the internet 🔥 +> AliBaba has released Marco-o1, a new open-source reasoning model 💥 +> NVIDIA released Hymba 1.5B Base and Instruct, the new state-of-the-art SLMs with hybrid architecture (Mamba + transformer) + +⏯️ Image/Video Generation +> Qwen2VL-Flux: new image generation model based on Qwen2VL image encoder, T5 and Flux for generation +> Lightricks released LTX-Video, a new DiT-based video generation model that can generate 24 FPS videos at 768x512 res ⏯️ +> Dataset: Image Preferences is a new image generation preference dataset made with DIBT community effort of Argilla 🏷️ + +Audio +> OuteAI released OuteTTS-0.2-500M new multilingual text-to-speech model based on Qwen-2.5-0.5B trained on 5B audio prompt tokens","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/hkJLWpId9HO_WJna5CLyB.png'}]",[],"[{'reaction': '🔥', 'users': ['loubnabnl', 'ariG23498', 'Norod78', 'eduardoworrel', 'chtmp223', 'John6666', 'BrigitteTousi', 'hvgupta1', 'BigSalmon', 'ethix'], 'count': 10}, {'reaction': '🤗', 'users': ['prithivMLmods', 'mkurman'], 'count': 2}, {'reaction': '👍', 'users': ['sikang99'], 'count': 1}]",2024-12-02 09:52:36,2024-12-02 09:54:46.428,[],/posts/merve/478819051434743,2960,"{'language': 'en', 'probability': 
0.8315475583076477}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,171030813937376,"[{'type': 'text', 'value': ""Rethinking Backpropagation: Thoughts on What's Wrong with Backpropagation"", 'raw': ""Rethinking Backpropagation: Thoughts on What's Wrong with Backpropagation""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""As a young researcher, I've often pondered the limitations of backpropagation, especially when mapped with how learning occurs in the human brain. While backpropagation has been the workhorse of deep learning, it isn't without flaws. In this post, I aim to share some thoughts on these shortcomings from first principles."", 'raw': ""As a young researcher, I've often pondered the limitations of backpropagation, especially when mapped with how learning occurs in the human brain. While backpropagation has been the workhorse of deep learning, it isn't without flaws. In this post, I aim to share some thoughts on these shortcomings from first principles.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full article ', 'raw': 'Full article '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/Jaward/rethinking-backpropagation', 'raw': 'https://huggingface.co/blog/Jaward/rethinking-backpropagation'}]","Rethinking Backpropagation: Thoughts on What's Wrong with Backpropagation + +As a young researcher, I've often pondered the limitations of backpropagation, especially when mapped with how learning occurs in the human brain. While backpropagation has been the workhorse of deep learning, it isn't without flaws. In this post, I aim to share some thoughts on these shortcomings from first principles. 
+ +Full article +https://huggingface.co/blog/Jaward/rethinking-backpropagation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/xLdUztDCCqTnQh1kRewsj.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-12-02 05:29:54,2024-12-02 09:07:51.672,[],/posts/Jaward/171030813937376,494,"{'language': 'en', 'probability': 0.9165250658988953}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,390477434706554,"[{'type': 'text', 'value': 'The first version of LLaMA-O1 has been uploaded to HF now! Here We Come!', 'raw': 'The first version of LLaMA-O1 has been uploaded to HF now! Here We Come!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Supervised:', 'raw': 'Supervised:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'SimpleBerry/LLaMA-O1-Supervised-1129'}, 'url': 'https://huggingface.co/SimpleBerry/LLaMA-O1-Supervised-1129', 'raw': 'https://huggingface.co/SimpleBerry/LLaMA-O1-Supervised-1129'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Base(Pretrain):', 'raw': 'Base(Pretrain):'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'SimpleBerry/LLaMA-O1-Base-1127'}, 'url': 'https://huggingface.co/SimpleBerry/LLaMA-O1-Base-1127', 'raw': 'https://huggingface.co/SimpleBerry/LLaMA-O1-Base-1127'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Supervised Finetune Dataset:', 'raw': 'Supervised Finetune Dataset:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'SimpleBerry/OpenLongCoT-SFT'}, 'url': 'https://huggingface.co/datasets/SimpleBerry/OpenLongCoT-SFT', 'raw': 'https://huggingface.co/datasets/SimpleBerry/OpenLongCoT-SFT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Pretraining Dataset:', 'raw': 'Pretraining Dataset:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'SimpleBerry/OpenLongCoT-Pretrain-1202'}, 'url': 'https://huggingface.co/datasets/SimpleBerry/OpenLongCoT-Pretrain-1202', 'raw': 'https://huggingface.co/datasets/SimpleBerry/OpenLongCoT-Pretrain-1202'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'RLHF is on the way! View our GitHub Repo:', 'raw': 'RLHF is on the way! 
View our GitHub Repo:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/SimpleBerry/LLaMA-O1', 'raw': 'https://github.com/SimpleBerry/LLaMA-O1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our ongoing related research:', 'raw': 'Our ongoing related research:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2406.07394'}, 'url': 'https://huggingface.co/papers/2406.07394', 'raw': 'https://huggingface.co/papers/2406.07394', 'label': 'Accessing GPT-4 level Mathematical Olympiad Solutions via Monte Carlo\n Tree Self-refine with LLaMa-3 8B (2406.07394)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2410.02884'}, 'url': 'https://huggingface.co/papers/2410.02884', 'raw': 'https://huggingface.co/papers/2410.02884', 'label': 'LLaMA-Berry: Pairwise Optimization for O1-like Olympiad-Level\n Mathematical Reasoning (2410.02884)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2411.18203'}, 'url': 'https://huggingface.co/papers/2411.18203', 'raw': 'https://huggingface.co/papers/2411.18203', 'label': 'Critic-V: VLM Critics Help Catch VLM Errors in Multimodal Reasoning (2411.18203)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'AdinaY', 'raw': '@AdinaY'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'akhaliq', 'raw': '@akhaliq'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'jwu323', 'raw': '@jwu323'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '------', 'raw': '------'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GGUF:https://huggingface.co/Lyte/LLaMA-O1-Supervised-1129-Q4_K_M-GGUF', 'raw': 'GGUF:https://huggingface.co/Lyte/LLaMA-O1-Supervised-1129-Q4_K_M-GGUF'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'online Demo (CPU-only): ', 'raw': 'online Demo (CPU-only): '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'SimpleBerry/LLaMA-O1-Supervised-1129-Demo'}, 'url': 'https://huggingface.co/spaces/SimpleBerry/LLaMA-O1-Supervised-1129-Demo', 'raw': 'https://huggingface.co/spaces/SimpleBerry/LLaMA-O1-Supervised-1129-Demo'}]","The first version of LLaMA-O1 has been uploaded to HF now! Here We Come! +Supervised: +https://huggingface.co/SimpleBerry/LLaMA-O1-Supervised-1129 +Base(Pretrain): +https://huggingface.co/SimpleBerry/LLaMA-O1-Base-1127 +Supervised Finetune Dataset: +https://huggingface.co/datasets/SimpleBerry/OpenLongCoT-SFT +Pretraining Dataset: +https://huggingface.co/datasets/SimpleBerry/OpenLongCoT-Pretrain-1202 +RLHF is on the way! 
View our GitHub Repo: +https://github.com/SimpleBerry/LLaMA-O1 +Our ongoing related research: +https://huggingface.co/papers/2406.07394 +https://huggingface.co/papers/2410.02884 +https://huggingface.co/papers/2411.18203 +@AdinaY @akhaliq @jwu323 +------ +GGUF:https://huggingface.co/Lyte/LLaMA-O1-Supervised-1129-Q4_K_M-GGUF +online Demo (CPU-only): https://huggingface.co/spaces/SimpleBerry/LLaMA-O1-Supervised-1129-Demo","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64bce15bafd1e46c5504ad38/hSNjU9aifLmxdJ2iK3uNs.png'}]","[{'_id': '63a369d98c0c89dcae3b8329', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg', 'fullname': 'Adina Yakefu', 'name': 'AdinaY', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 774}, {'_id': '60f1abe7544c2adfd699860c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg', 'fullname': 'AK', 'name': 'akhaliq', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7219}, {'_id': '6407d3d3a7bc7c3865addc4b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6407d3d3a7bc7c3865addc4b/NEMP6xShRq-5H_rQ5ganY.jpeg', 'fullname': 'Jianbo Wu', 'name': 'jwu323', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6}]","[{'reaction': '🚀', 'users': ['jwu323', 'di-zhang-fdu', 'Sri-Vigneshwar-DJ', 'Zhiding', 'davidrd123', 'Norod78', 'damilojohn', 'John6666', 'shetumohanto', 'Nymbo', 'xcyang', 'AdinaY', 's3nh'], 'count': 13}, {'reaction': '🤗', 'users': ['Lyte', 'AdinaY', 's3nh'], 'count': 3}, {'reaction': '🔥', 'users': ['kz919'], 'count': 1}]",2024-12-02 00:13:41,2024-12-03 08:59:25.226,"[{'_id': '64bce15bafd1e46c5504ad38', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png', 'fullname': 'Di Zhang', 'name': 'di-zhang-fdu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 151, 'isFollowing': False}, {'_id': '63a369d98c0c89dcae3b8329', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg', 'fullname': 'Adina Yakefu', 'name': 'AdinaY', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 774, 'isFollowing': False}]",/posts/di-zhang-fdu/390477434706554,3093,"{'language': 'en', 'probability': 0.7025601863861084}",3 +/avatars/0bc16a7447cd71ac18828a678313bd83.svg,11.0,Mike Young,mikelabs,645658408145024,"[{'type': 'text', 'value': ""love how bipedal robot papers are always like we made it not fall down! and somehow that's still a major accomplishment in 2024 🤖 (that's actually not totally fair, walking is really hard ok)"", 'raw': ""love how bipedal robot papers are always like we made it not fall down! 
and somehow that's still a major accomplishment in 2024 🤖 (that's actually not totally fair, walking is really hard ok)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.aimodels.fyi/papers/arxiv/real-time-safe-bipedal-robot-navigation-using', 'raw': 'https://www.aimodels.fyi/papers/arxiv/real-time-safe-bipedal-robot-navigation-using'}]","love how bipedal robot papers are always like we made it not fall down! and somehow that's still a major accomplishment in 2024 🤖 (that's actually not totally fair, walking is really hard ok) + +https://www.aimodels.fyi/papers/arxiv/real-time-safe-bipedal-robot-navigation-using",[],[],"[{'reaction': '👀', 'users': ['John6666', 'mikelabs'], 'count': 2}]",2024-12-01 14:50:33,2024-12-01 14:50:33.852,[],/posts/mikelabs/645658408145024,2224,"{'language': 'en', 'probability': 0.9213880896568298}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,471137969449325,"[{'type': 'text', 'value': 'Excited to share ', 'raw': 'Excited to share '}, {'type': 'mention', 'user': 'LinkedIn', 'raw': '@LinkedIn'}, {'type': 'text', 'value': "" 's innovative approach to evaluating semantic search quality! As part of the Search AI team, we've developed a groundbreaking evaluation pipeline that revolutionizes how we measure search relevance."", 'raw': "" 's innovative approach to evaluating semantic search quality! As part of the Search AI team, we've developed a groundbreaking evaluation pipeline that revolutionizes how we measure search relevance.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Key Innovation: On-Topic Rate (OTR)', 'raw': '>> Key Innovation: On-Topic Rate (OTR)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This novel metric measures the semantic match between queries and search results, going beyond simple keyword matching. The system evaluates whether content is truly relevant to the query's intent, not just matching surface-level terms."", 'raw': ""This novel metric measures the semantic match between queries and search results, going beyond simple keyword matching. The system evaluates whether content is truly relevant to the query's intent, not just matching surface-level terms.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Technical Implementation Details', 'raw': '>> Technical Implementation Details'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Query Set Construction', 'raw': 'Query Set Construction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Golden Set: Contains curated top queries and complex topical queries', 'raw': '• Golden Set: Contains curated top queries and complex topical queries'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Open Set: Includes trending queries and random production queries for diversity', 'raw': '• Open Set: Includes trending queries and random production queries for diversity'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Evaluation Pipeline Architecture', 'raw': 'Evaluation Pipeline Architecture'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Query Processing:', 'raw': '1. 
Query Processing:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Retrieves top 10 documents per query', 'raw': '- Retrieves top 10 documents per query'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Extracts post text and article information', 'raw': '- Extracts post text and article information'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Processes both primary content and reshared materials', 'raw': '- Processes both primary content and reshared materials'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. GAI Integration:', 'raw': '2. GAI Integration:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Leverages GPT-3.5 with specialized prompts', 'raw': '- Leverages GPT-3.5 with specialized prompts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Produces three key outputs:', 'raw': '- Produces three key outputs:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Binary relevance decision', 'raw': '- Binary relevance decision'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Relevance score (0-1 range)', 'raw': '- Relevance score (0-1 range)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Decision reasoning', 'raw': '- Decision reasoning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Quality Assurance', 'raw': 'Quality Assurance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Validation achieved 94.5% accuracy on a test set of 600 query-post pairs', 'raw': '• Validation achieved 94.5% accuracy on a test set of 600 query-post pairs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Human evaluation showed 81.72% consistency with expert annotators', 'raw': '• Human evaluation showed 81.72% consistency with expert annotators'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Business Impact', 'raw': '>> Business Impact'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This system now serves as LinkedIn's benchmark for content search experiments, enabling:"", 'raw': ""This system now serves as LinkedIn's benchmark for content search experiments, enabling:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Weekly performance monitoring', 'raw': '• Weekly performance monitoring'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Rapid offline testing of new ML models', 'raw': '• Rapid offline testing of new ML models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Systematic identification of improvement opportunities', 'raw': '• Systematic identification of improvement opportunities'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What are your thoughts on semantic search evaluation?', 'raw': 'What are your thoughts on semantic search evaluation?'}]","Excited to share @LinkedIn 's innovative approach to evaluating semantic search quality! As part of the Search AI team, we've developed a groundbreaking evaluation pipeline that revolutionizes how we measure search relevance. + +>> Key Innovation: On-Topic Rate (OTR) +This novel metric measures the semantic match between queries and search results, going beyond simple keyword matching. 
The system evaluates whether content is truly relevant to the query's intent, not just matching surface-level terms. + +>> Technical Implementation Details +Query Set Construction +• Golden Set: Contains curated top queries and complex topical queries +• Open Set: Includes trending queries and random production queries for diversity + +Evaluation Pipeline Architecture +1. Query Processing: +- Retrieves top 10 documents per query +- Extracts post text and article information +- Processes both primary content and reshared materials + +2. GAI Integration: +- Leverages GPT-3.5 with specialized prompts +- Produces three key outputs: +- Binary relevance decision +- Relevance score (0-1 range) +- Decision reasoning + +Quality Assurance +• Validation achieved 94.5% accuracy on a test set of 600 query-post pairs +• Human evaluation showed 81.72% consistency with expert annotators + +>> Business Impact +This system now serves as LinkedIn's benchmark for content search experiments, enabling: +• Weekly performance monitoring +• Rapid offline testing of new ML models +• Systematic identification of improvement opportunities + +What are your thoughts on semantic search evaluation?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/OnTix95m_6vD690puzD5_.jpeg'}]","[{'_id': '64facac17904ea30e656c5b7', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64facac17904ea30e656c5b7/REw2SbbjQRDkiU1iEsHc8.png', 'fullname': 'LinkedIn', 'name': 'LinkedIln', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}]","[{'reaction': '👀', 'users': ['John6666', 'BrigitteTousi'], 'count': 2}, {'reaction': '🔥', 'users': ['BrigitteTousi'], 'count': 1}]",2024-12-01 10:24:47,2024-12-01 10:24:47.434,[],/posts/singhsidhukuldeep/471137969449325,946,"{'language': 'en', 'probability': 0.8059738278388977}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png,1048.0,Hexgrad,hexgrad,404979273213971,"[{'type': 'inline_code', 'code': 'self.brag():', 'raw': '`self.brag():`'}, {'type': 'text', 'value': ' Kokoro finally got 300 votes in ', 'raw': ' Kokoro finally got 300 votes in '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Pendrokar/TTS-Spaces-Arena'}, 'url': 'https://huggingface.co/spaces/Pendrokar/TTS-Spaces-Arena', 'raw': 'https://huggingface.co/spaces/Pendrokar/TTS-Spaces-Arena'}, {'type': 'text', 'value': ' after ', 'raw': ' after '}, {'type': 'mention', 'user': 'Pendrokar', 'raw': '@Pendrokar'}, {'type': 'text', 'value': ' was kind enough to add it 3 weeks ago.', 'raw': ' was kind enough to add it 3 weeks ago.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Discounting the small sample size of votes, I think it is safe to say that ', 'raw': 'Discounting the small sample size of votes, I think it is safe to say that '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'hexgrad/Kokoro-TTS'}, 'url': 'https://huggingface.co/spaces/hexgrad/Kokoro-TTS', 'raw': 'https://huggingface.co/spaces/hexgrad/Kokoro-TTS'}, {'type': 'text', 'value': ' is currently a top 3 model among the contenders in that Arena. This is notable because:', 'raw': ' is currently a top 3 model among the contenders in that Arena. 
This is notable because:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- At 82M params, Kokoro is one of the smaller models in the Arena', 'raw': '- At 82M params, Kokoro is one of the smaller models in the Arena'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MeloTTS has 52M params', 'raw': '- MeloTTS has 52M params'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- F5 TTS has 330M params', 'raw': '- F5 TTS has 330M params'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- XTTSv2 has 467M params', 'raw': '- XTTSv2 has 467M params'}]","`self.brag():` Kokoro finally got 300 votes in https://huggingface.co/spaces/Pendrokar/TTS-Spaces-Arena after @Pendrokar was kind enough to add it 3 weeks ago. +Discounting the small sample size of votes, I think it is safe to say that https://huggingface.co/spaces/hexgrad/Kokoro-TTS is currently a top 3 model among the contenders in that Arena. This is notable because: +- At 82M params, Kokoro is one of the smaller models in the Arena +- MeloTTS has 52M params +- F5 TTS has 330M params +- XTTSv2 has 467M params","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6629552c96f529a39bac7c89/9FOWfgbjPOGf-Ug2RLgwb.png'}]","[{'_id': '63d52e0c4e5642795617f668', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d52e0c4e5642795617f668/ztXLrdFz3gkUJUIIQXfHo.png', 'fullname': 'Yanis L', 'name': 'Pendrokar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 56}]","[{'reaction': '🔥', 'users': ['Pendrokar', 'Delta-Vector', 'djuna', 'John6666', 'shetumohanto', 'KingNish', 'FounderFeed', 'not-lain', 'linz'], 'count': 9}]",2024-11-30 21:52:56,2024-12-02 02:02:14.043,"[{'_id': '63d52e0c4e5642795617f668', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d52e0c4e5642795617f668/ztXLrdFz3gkUJUIIQXfHo.png', 'fullname': 'Yanis L', 'name': 'Pendrokar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 56, 'isFollowing': False}, {'_id': '6629552c96f529a39bac7c89', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png', 'fullname': 'Hexgrad', 'name': 'hexgrad', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1048, 'isFollowing': False}, {'_id': '66c26b6fb01b19d8c3c2467b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66c26b6fb01b19d8c3c2467b/fCFcHHgkDBhGZvjeIIbwN.png', 'fullname': 'Mango', 'name': 'Delta-Vector', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 185, 'isFollowing': False}, {'_id': '66e67a5a53d41583b777bcb0', 'avatarUrl': '/avatars/085e548e0ae70e4fd4079b4acaeb9c82.svg', 'fullname': 'Abhinandan Pandey', 'name': 'abhinandan1111', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/hexgrad/404979273213971,3063,"{'language': 'en', 'probability': 0.9324449300765991}",5 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,226875720200341,"[{'type': 'text', 'value': 'Hugging Face is becoming the best place to share the most viral AI apps with spaces. ', 'raw': 'Hugging Face is becoming the best place to share the most viral AI apps with spaces. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kolors Virtual Try-on just crossed 6,000,000 unique visitors & is now the #5 most popular space. Congrats to the Kwai Kolors team! ', 'raw': 'Kolors Virtual Try-on just crossed 6,000,000 unique visitors & is now the #5 most popular space. Congrats to the Kwai Kolors team! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Kwai-Kolors/Kolors-Virtual-Try-On'}, 'url': 'https://huggingface.co/spaces/Kwai-Kolors/Kolors-Virtual-Try-On', 'raw': 'https://huggingface.co/spaces/Kwai-Kolors/Kolors-Virtual-Try-On'}]","Hugging Face is becoming the best place to share the most viral AI apps with spaces. + +Kolors Virtual Try-on just crossed 6,000,000 unique visitors & is now the #5 most popular space. Congrats to the Kwai Kolors team! + +https://huggingface.co/spaces/Kwai-Kolors/Kolors-Virtual-Try-On","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/hrLejjX5o72y4GNurSfua.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/8ciZOXyVeJuQ87t_HCNNO.png'}]",[],"[{'reaction': '❤️', 'users': ['reonyy', 'John6666', 'alielfilali01', 'LPDoctor', 'KingNish', 'victor', 'AtAndDev', 'SignedAdam', 'qubvel-hf', 'Joseph717171', 'GoDjMike', 'loubnabnl', 'philquist', 'Omarito2412', 'ervijayraghuwanshi', 'Susant-Achary', 'sayakpaul', 'Fishtiks'], 'count': 18}, {'reaction': '🔥', 'users': ['yisabsi', 'AtAndDev', 'qubvel-hf', 'Joseph717171', 'loubnabnl', 'siddqamar'], 'count': 6}, {'reaction': '👍', 'users': ['rjmalagon', 'Joseph717171'], 'count': 2}, {'reaction': '🤗', 'users': ['Joseph717171'], 'count': 1}]",2024-11-30 15:19:13,2024-12-02 04:17:49.147,"[{'_id': '64aed7a70d8a0c9ccf169400', 'avatarUrl': '/avatars/22ed1ee2ed3dcf3c6d4ebcd96551934b.svg', 'fullname': 'Bahtiyar Ergün', 'name': 'bahtiyarergun', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '6725e9c068fccafba25c70c5', 'avatarUrl': '/avatars/d8ce4fb2c26d2c628980b519711dee8b.svg', 'fullname': 'Sergo Lamp', 'name': 'SergoLamp', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/clem/226875720200341,4567,"{'language': 'en', 'probability': 0.825945258140564}",2 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg,40.0,Jordan Legg,takarajordan,927208988224169,"[{'type': 'text', 'value': ""I'm not sure why I haven't done this already!"", 'raw': ""I'm not sure why I haven't done this already!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I just made a space to count and visualize tokens for Diffusion models, no more guesswork! It's super fast too."", 'raw': ""I just made a space to count and visualize tokens for Diffusion models, no more guesswork! 
It's super fast too.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out here and try out your prompts: ', 'raw': 'Check it out here and try out your prompts: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'takarajordan/DiffusionTokenizer'}, 'url': 'https://huggingface.co/spaces/takarajordan/DiffusionTokenizer', 'raw': 'https://huggingface.co/spaces/takarajordan/DiffusionTokenizer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Uses these tokenizers below:', 'raw': 'Uses these tokenizers below:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'openai/clip-vit-large-patch14'}, 'url': 'https://huggingface.co/openai/clip-vit-large-patch14', 'raw': 'https://huggingface.co/openai/clip-vit-large-patch14'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'google/t5-v1_1-xxl'}, 'url': 'https://huggingface.co/google/t5-v1_1-xxl', 'raw': 'https://huggingface.co/google/t5-v1_1-xxl'}]","I'm not sure why I haven't done this already! + +I just made a space to count and visualize tokens for Diffusion models, no more guesswork! It's super fast too. + +Check it out here and try out your prompts: https://huggingface.co/spaces/takarajordan/DiffusionTokenizer + +Uses these tokenizers below: +https://huggingface.co/openai/clip-vit-large-patch14 +https://huggingface.co/google/t5-v1_1-xxl",[],[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'lagipa9977', 'John6666', 'ZennyKenny'], 'count': 4}]",2024-11-27 16:57:58,2024-11-27 16:57:58.053,[],/posts/takarajordan/927208988224169,1212,"{'language': 'en', 'probability': 0.7997938394546509}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/w3Z6xyKVBA6np65Tb16dP.jpeg,68.0,Simon Pagezy,pagezyhf,462141642378123,"[{'type': 'text', 'value': 'Hello Hugging Face Community,', 'raw': 'Hello Hugging Face Community,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'if you use Google Kubernetes Engine to host you ML workloads, I think this series of videos is a great way to kickstart your journey of deploying LLMs, in less than 10 minutes! Thank you ', 'raw': 'if you use Google Kubernetes Engine to host you ML workloads, I think this series of videos is a great way to kickstart your journey of deploying LLMs, in less than 10 minutes! Thank you '}, {'type': 'mention', 'user': 'wietse-venema-demo', 'raw': '@wietse-venema-demo'}, {'type': 'text', 'value': ' !', 'raw': ' !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To watch in this order:', 'raw': 'To watch in this order:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Learn what are Hugging Face Deep Learning Containers', 'raw': '1. Learn what are Hugging Face Deep Learning Containers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/aWMp_hUUa0c?si=t-LPRkRNfD3DDNfr', 'raw': 'https://youtu.be/aWMp_hUUa0c?si=t-LPRkRNfD3DDNfr'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Learn how to deploy a LLM with our Deep Learning Container using Text Generation Inference', 'raw': '2. 
Learn how to deploy an LLM with our Deep Learning Container using Text Generation Inference'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/Q3oyTOU1TMc?si=V6Dv-U1jt1SR97fj', 'raw': 'https://youtu.be/Q3oyTOU1TMc?si=V6Dv-U1jt1SR97fj'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Learn how to scale your inference endpoint based on traffic', 'raw': '3. Learn how to scale your inference endpoint based on traffic'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/QjLZ5eteDds?si=nDIAirh1r6h2dQMD', 'raw': 'https://youtu.be/QjLZ5eteDds?si=nDIAirh1r6h2dQMD'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you want more of these small tutorials and have any theme in mind, let me know! ', 'raw': 'If you want more of these small tutorials and have any theme in mind, let me know! '}]","Hello Hugging Face Community, + +if you use Google Kubernetes Engine to host your ML workloads, I think this series of videos is a great way to kickstart your journey of deploying LLMs, in less than 10 minutes! Thank you @wietse-venema-demo ! + +To watch in this order: +1. Learn what Hugging Face Deep Learning Containers are +https://youtu.be/aWMp_hUUa0c?si=t-LPRkRNfD3DDNfr + +2. Learn how to deploy an LLM with our Deep Learning Container using Text Generation Inference +https://youtu.be/Q3oyTOU1TMc?si=V6Dv-U1jt1SR97fj + +3. Learn how to scale your inference endpoint based on traffic +https://youtu.be/QjLZ5eteDds?si=nDIAirh1r6h2dQMD + +If you want more of these small tutorials and have any theme in mind, let me know! ",[],"[{'_id': '66fa6693cb7628aaefafa9fe', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/epEa83WUS80c0uucHI--0.jpeg', 'fullname': 'Wietse Venema', 'name': 'wietse-venema-demo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-27 15:34:16,2024-11-27 15:36:39.187,[],/posts/pagezyhf/462141642378123,312,"{'language': 'en', 'probability': 0.7096571922302246}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,663466156074132,"[{'type': 'text', 'value': 'The authors of ColPali trained a retrieval model based on SmolVLM 🤠 ', 'raw': 'The authors of ColPali trained a retrieval model based on SmolVLM 🤠 '}, {'type': 'link', 'href': 'https://huggingface.co/vidore/colsmolvlm-alpha', 'raw': 'https://huggingface.co/vidore/colsmolvlm-alpha'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'TLDR; ', 'raw': 'TLDR; '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ColSmolVLM performs better than ColPali and DSE-Qwen2 on all English tasks', 'raw': '- ColSmolVLM performs better than ColPali and DSE-Qwen2 on all English tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ColSmolVLM is more memory efficient than ColQwen2 💗 ', 'raw': '- ColSmolVLM is more memory efficient than ColQwen2 💗 '}]","The authors of ColPali trained a retrieval model based on SmolVLM 🤠 https://huggingface.co/vidore/colsmolvlm-alpha +TLDR; + +- ColSmolVLM performs better than ColPali and DSE-Qwen2 on all English tasks + +- ColSmolVLM is more memory efficient than ColQwen2 💗 ","[{'type': 'image', 'url': 
'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/famfZqjmQHHZ1ZGIlwWCu.png'}]",[],"[{'reaction': '👍', 'users': ['Aurelien-Morgan', 'BrigitteTousi', 'ucsahin', 'John6666', 'davanstrien', 'thomwolf', 'shetumohanto', 'Steveeeeeeen'], 'count': 8}, {'reaction': '🔥', 'users': ['andito', 'ucsahin', 'PotatoEnrage', 'thomwolf', 'Steveeeeeeen'], 'count': 5}, {'reaction': '👀', 'users': ['nieche', 'ucsahin'], 'count': 2}]",2024-11-27 14:12:55,2024-11-27 14:12:55.036,[],/posts/merve/663466156074132,2240,"{'language': 'en', 'probability': 0.8085429668426514}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d66b494bbd0d92b641cdbb/6-7dm7B-JxcoS1QlCPdMN.jpeg,244.0,Andres Marafioti,andito,228178444252021,"[{'type': 'text', 'value': ""Let's go! We are releasing SmolVLM, a smol 2B VLM built for on-device inference that outperforms all models at similar GPU RAM usage and token throughput."", 'raw': ""Let's go! We are releasing SmolVLM, a smol 2B VLM built for on-device inference that outperforms all models at similar GPU RAM usage and token throughput.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SmolVLM generates tokens 7.5 to 16 times faster than Qwen2-VL! 🤯', 'raw': '- SmolVLM generates tokens 7.5 to 16 times faster than Qwen2-VL! 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Other models at this size crash a laptop, but SmolVLM comfortably generates 17 tokens/sec on a macbook! 🚀', 'raw': '- Other models at this size crash a laptop, but SmolVLM comfortably generates 17 tokens/sec on a macbook! 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SmolVLM can be fine-tuned on a Google Colab! Or process millions of documents with a consumer GPU!', 'raw': '- SmolVLM can be fine-tuned on a Google Colab! 
Or process millions of documents with a consumer GPU!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SmolVLM even outperforms larger models in video benchmarks, despite not even being trained on videos!', 'raw': '- SmolVLM even outperforms larger models in video benchmarks, despite not even being trained on videos!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out more!', 'raw': 'Check out more!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'HuggingFaceTB/SmolVLM'}, 'url': 'https://huggingface.co/spaces/HuggingFaceTB/SmolVLM', 'raw': 'https://huggingface.co/spaces/HuggingFaceTB/SmolVLM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog: ', 'raw': 'Blog: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/smolvlm', 'raw': 'https://huggingface.co/blog/smolvlm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'HuggingFaceTB/SmolVLM-Instruct'}, 'url': 'https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct', 'raw': 'https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fine-tuning script: ', 'raw': 'Fine-tuning script: '}, {'type': 'link', 'href': 'https://github.com/huggingface/smollm/blob/main/finetuning/Smol_VLM_FT.ipynb', 'raw': 'https://github.com/huggingface/smollm/blob/main/finetuning/Smol_VLM_FT.ipynb'}, {'type': 'new_line', 'raw': '\n'}]","Let's go! We are releasing SmolVLM, a smol 2B VLM built for on-device inference that outperforms all models at similar GPU RAM usage and token throughput. + +- SmolVLM generates tokens 7.5 to 16 times faster than Qwen2-VL! 🤯 +- Other models at this size crash a laptop, but SmolVLM comfortably generates 17 tokens/sec on a macbook! 🚀 +- SmolVLM can be fine-tuned on a Google Colab! Or process millions of documents with a consumer GPU! +- SmolVLM even outperforms larger models in video benchmarks, despite not even being trained on videos! + +Check out more! +Demo: https://huggingface.co/spaces/HuggingFaceTB/SmolVLM +Blog: https://huggingface.co/blog/smolvlm +Model: https://huggingface.co/HuggingFaceTB/SmolVLM-Instruct +Fine-tuning script: https://github.com/huggingface/smollm/blob/main/finetuning/Smol_VLM_FT.ipynb +",[],[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'BrigitteTousi', 'jpgallegoar', 'davanstrien', 'pcuenq', 'clem', 'Dref360', 'linoyts', 's3nh', 'Felladrin', 'iamsingularity', 'John6666', 'sadhaklal', 'gabrielmbmb', 'quyet7779', 'andito', 'shetumohanto', 'coderfpv'], 'count': 18}, {'reaction': '❤️', 'users': ['clem', 'Dref360', 'jeffboudier', 'Felladrin', 'iamsingularity', 'shetumohanto'], 'count': 6}, {'reaction': '🤗', 'users': ['John6666'], 'count': 1}, {'reaction': '👍', 'users': ['coderfpv'], 'count': 1}]",2024-11-27 13:30:58,2024-11-27 13:30:58.485,[],/posts/andito/228178444252021,3403,"{'language': 'en', 'probability': 0.7508378028869629}",0 +/avatars/6cda37befc873a92ed6d5dcba507954a.svg,15.0,Haebin Seong,hbseong,841552401488164,"[{'type': 'text', 'value': '🚨🔥 HOT MODEL!! Currently 870 downloads!!! 
🔥🚨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Introducing the 435M model that outperforms Llama-Guard-3-8B while slashing 75% of the computation cost! 💻💥', 'raw': 'Introducing the 435M model that outperforms Llama-Guard-3-8B while slashing 75% of the computation cost! 💻💥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Check it out: ', 'raw': '👉 Check it out: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'hbseong/HarmAug-Guard'}, 'url': 'https://huggingface.co/hbseong/HarmAug-Guard', 'raw': 'https://huggingface.co/hbseong/HarmAug-Guard'}, {'type': 'text', 'value': ' (Yes, INFERENCE CODE INCLUDED! 💡)', 'raw': ' (Yes, INFERENCE CODE INCLUDED! 💡)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More details in our paper: ', 'raw': 'More details in our paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2410.01524', 'raw': 'https://arxiv.org/abs/2410.01524'}, {'type': 'text', 'value': ' 📜', 'raw': ' 📜'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#HarmAug #LLM #Safety #EfficiencyBoost #Research #AI #MachineLearning ', 'raw': '#HarmAug #LLM #Safety #EfficiencyBoost #Research #AI #MachineLearning '}]","🚨🔥 HOT MODEL!! Currently 870 downloads!!! 🔥🚨 + +Introducing the 435M model that outperforms Llama-Guard-3-8B while slashing 75% of the computation cost! 💻💥 +👉 Check it out: https://huggingface.co/hbseong/HarmAug-Guard (Yes, INFERENCE CODE INCLUDED! 💡) + +More details in our paper: https://arxiv.org/abs/2410.01524 📜 + +#HarmAug #LLM #Safety #EfficiencyBoost #Research #AI #MachineLearning ",[],[],"[{'reaction': '👀', 'users': ['John6666', 'GoDjMike', 'djuna'], 'count': 3}, {'reaction': '❤️', 'users': ['takarajordan'], 'count': 1}]",2024-11-27 13:05:17,2024-11-27 13:55:52.214,"[{'_id': '6747216773f994999f55e474', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/BCVYw_TVAT8jx6gazhFII.png', 'fullname': 'Beltran', 'name': 'Zulypa', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/hbseong/841552401488164,952,"{'language': 'en', 'probability': 0.6909310817718506}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63f7888abd28622c9b9a0b80/5t6JU_Cm7yFYTRUGr9eqH.jpeg,78.0,Natalia Elvira,nataliaElv,704781741636095,"[{'type': 'text', 'value': 'Would you like to get a high-quality dataset to pre-train LLMs in your language? 🌏', 'raw': 'Would you like to get a high-quality dataset to pre-train LLMs in your language? 🌏'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""At Hugging Face we're preparing a collaborative annotation effort to build an open-source multilingual dataset as part of the Data is Better Together initiative. "", 'raw': ""At Hugging Face we're preparing a collaborative annotation effort to build an open-source multilingual dataset as part of the Data is Better Together initiative. 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow the link below, check if your language is listed and sign up to be a Language Lead!', 'raw': 'Follow the link below, check if your language is listed and sign up to be a Language Lead!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://forms.gle/s9nGajBh6Pb9G72J6', 'raw': 'https://forms.gle/s9nGajBh6Pb9G72J6'}]","Would you like to get a high-quality dataset to pre-train LLMs in your language? 🌏 + +At Hugging Face we're preparing a collaborative annotation effort to build an open-source multilingual dataset as part of the Data is Better Together initiative. + +Follow the link below, check if your language is listed and sign up to be a Language Lead! + +https://forms.gle/s9nGajBh6Pb9G72J6",[],[],"[{'reaction': '👀', 'users': ['John6666', 'davanstrien', 'stefan-it', 'theblackcat102', 'BrigitteTousi', 'ayymen', 'iky1e', 'Cossale', 'ai-everyday', 'aloobun', 'Stopwolf'], 'count': 11}, {'reaction': '❤️', 'users': ['Jose7juanFdz'], 'count': 1}]",2024-11-27 10:18:52,2024-11-27 10:19:35.487,[],/posts/nataliaElv/704781741636095,1657,"{'language': 'en', 'probability': 0.8837218284606934}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,970693148533155,"[{'type': 'text', 'value': 'Hugging Face recently added Bluesky to profile links, which is cool. It would be great to also support links to alternative Git services like Codeberg, GitLab, and Gitea. Many developers use platforms beyond GitHub, and showcasing repositories from these sites would be a great feature', 'raw': 'Hugging Face recently added Bluesky to profile links, which is cool. It would be great to also support links to alternative Git services like Codeberg, GitLab, and Gitea. Many developers use platforms beyond GitHub, and showcasing repositories from these sites would be a great feature'}]","Hugging Face recently added Bluesky to profile links, which is cool. It would be great to also support links to alternative Git services like Codeberg, GitLab, and Gitea. Many developers use platforms beyond GitHub, and showcasing repositories from these sites would be a great feature","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/643ac5d2e2b979ae6144d68c/h7gtrsC77plnUaRPRVJgK.png'}]",[],"[{'reaction': '🤗', 'users': ['John6666'], 'count': 1}, {'reaction': '👍', 'users': ['takarajordan'], 'count': 1}]",2024-11-27 08:57:04,2024-11-27 08:57:04.166,[],/posts/nyuuzyou/970693148533155,946,"{'language': 'en', 'probability': 0.9494836926460266}",0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1667340977124-noauth.jpeg,1.0,Abhishek Patnia,appliedml42,725162082538334,"[{'type': 'text', 'value': 'I am trying to find resources that explain how I can protect against instruction following capability degradation due to LoRA fine-tuning. ', 'raw': 'I am trying to find resources that explain how I can protect against instruction following capability degradation due to LoRA fine-tuning. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' For example, I fine-tuned Llama 3.2 3B Instruct on ', 'raw': ' For example, I fine-tuned Llama 3.2 3B Instruct on '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'cornell-movie-review-data/rotten_tomatoes'}, 'url': 'https://huggingface.co/datasets/cornell-movie-review-data/rotten_tomatoes', 'raw': 'https://huggingface.co/datasets/cornell-movie-review-data/rotten_tomatoes'}, {'type': 'text', 'value': ' dataset and saw significant degradation in ifeval benchmark scores. ', 'raw': ' dataset and saw significant degradation in ifeval benchmark scores. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I would appreciate any pointers 🙏🏽', 'raw': 'I would appreciate any pointers 🙏🏽'}]","I am trying to find resources that explain how I can protect against instruction following capability degradation due to LoRA fine-tuning. + + For example, I fine-tuned Llama 3.2 3B Instruct on https://huggingface.co/datasets/cornell-movie-review-data/rotten_tomatoes dataset and saw significant degradation in ifeval benchmark scores. + +I would appreciate any pointers 🙏🏽",[],[],"[{'reaction': '👀', 'users': ['John6666', 'havona3465'], 'count': 2}]",2024-11-26 23:53:16,2024-11-28 19:02:19.107,"[{'_id': '63043fe07373aacccd8a05ce', 'avatarUrl': '/avatars/18df5db7781e49c30af73445f5fef724.svg', 'fullname': 'Victor Hall', 'name': 'panopstor', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10, 'isFollowing': False}]",/posts/appliedml42/725162082538334,1315,"{'language': 'en', 'probability': 0.8863638639450073}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,635419468276177,"[{'type': 'text', 'value': 'FLUX Redux is a hidden Gem', 'raw': 'FLUX Redux is a hidden Gem'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I am still doing huge research to publish an amazing fully Public - no paywalled Tutorial, but this is generated via SwarmUI', 'raw': 'I am still doing huge research to publish an amazing fully Public - no paywalled Tutorial, but this is generated via SwarmUI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Style Model Merge Strength : 0.5', 'raw': 'Style Model Merge Strength : 0.5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'FLUX Guidance Scale is : 6', 'raw': 'FLUX Guidance Scale is : 6'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Used base model is my FLUX fine tuned model with 256 images via Kohya SS GUI as shown in tutorial ( ', 'raw': 'Used base model is my FLUX fine tuned model with 256 images via Kohya SS GUI as shown in tutorial ( '}, {'type': 'link', 'href': 'https://youtu.be/FvpWy1x5etM', 'raw': 'https://youtu.be/FvpWy1x5etM'}, {'type': 'text', 'value': ' ) - 70 epoch', 'raw': ' ) - 70 epoch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Prompt : anime ohwx man walking in a jungle ohwx man, anime', 'raw': 'Prompt : anime ohwx man walking in a jungle ohwx man, anime'}]","FLUX Redux is a hidden Gem + +I am still doing huge research to publish an amazing fully Public - no paywalled 
Tutorial, but this is generated via SwarmUI + +Style Model Merge Strength : 0.5 + +FLUX Guidance Scale is : 6 + +Used base model is my FLUX fine tuned model with 256 images via Kohya SS GUI as shown in tutorial ( https://youtu.be/FvpWy1x5etM ) - 70 epoch + +Prompt : anime ohwx man walking in a jungle ohwx man, anime","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Gzb7zVIMQGfUeDMsjB_8f.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/3yj8dPaGCPXTkrFne1_m0.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Imrg4UGzpC7sW6Hw8xdwG.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/GrKqsAROV35vxv5BeULM2.png'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'Sri-Vigneshwar-DJ', 'havona3465', 'win10', 'Alper666', 'marksverdhei', 'victor'], 'count': 7}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'OreoAu', 'ArthurZ', 'iky1e', 'costastsaou'], 'count': 5}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'ArthurZ'], 'count': 2}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666'], 'count': 2}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2024-11-26 23:29:52,2025-01-06 15:01:37.046,"[{'_id': '66af454505e2b2771b72f673', 'avatarUrl': '/avatars/9d3bb56984cdbf511463968227bdb304.svg', 'fullname': 'Issou', 'name': 'Jesus666', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6345bd89fe134dfd7a0dba40', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg', 'fullname': 'Furkan Gözükara', 'name': 'MonsterMMORPG', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 646, 'isFollowing': False}]",/posts/MonsterMMORPG/635419468276177,1980,"{'language': 'en', 'probability': 0.7387595176696777}",4 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/lJZriu6mJCgWkyYpbd4Pe.png,14.0,Luke Neumann,LukeNeumann,776620963360337,"[{'type': 'text', 'value': 'I had a question about Trending datasets. Our initial dataset ""Oregon Coast in 4K"" was trending at #3 for video at about 700 downloads.', 'raw': 'I had a question about Trending datasets. Our initial dataset ""Oregon Coast in 4K"" was trending at #3 for video at about 700 downloads.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Over the past two days our downloads have spiked, now up to over 2,000, but the dataset has dropped down to the 3rd or 4th page of Trending.', 'raw': 'Over the past two days our downloads have spiked, now up to over 2,000, but the dataset has dropped down to the 3rd or 4th page of Trending.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What metrics are used to determine dataset Trending position?', 'raw': 'What metrics are used to determine dataset Trending position?'}]","I had a question about Trending datasets. 
Our initial dataset ""Oregon Coast in 4K"" was trending at #3 for video at about 700 downloads. + +Over the past two days our downloads have spiked, now up to over 2,000, but the dataset has dropped down to the 3rd or 4th page of Trending. + +What metrics are used to determine dataset Trending position?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/673637efb403886c210a588d/ej3aXWYdBOT06qvkuRkoh.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'victor'], 'count': 2}]",2024-11-26 21:45:04,2024-11-27 08:50:46.202,"[{'_id': '643ac5d2e2b979ae6144d68c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png', 'fullname': 'nyuuzyou', 'name': 'nyuuzyou', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 244, 'isFollowing': False}]",/posts/LukeNeumann/776620963360337,1040,"{'language': 'en', 'probability': 0.9714099764823914}",1 +https://cdn-avatars.huggingface.co/v1/production/uploads/61c141342aac764ce1654e43/81AwoT5IQ_Xdw0OVw7TKu.jpeg,3482.0,Loubna Ben Allal,loubnabnl,547206198374677,"[{'type': 'text', 'value': 'Making SmolLM2 reproducible: open-sourcing our training & evaluation toolkit 🛠️ ', 'raw': 'Making SmolLM2 reproducible: open-sourcing our training & evaluation toolkit 🛠️ '}, {'type': 'link', 'href': 'https://github.com/huggingface/smollm/', 'raw': 'https://github.com/huggingface/smollm/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Pre-training code with nanotron', 'raw': '- Pre-training code with nanotron'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Evaluation suite with lighteval', 'raw': '- Evaluation suite with lighteval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Synthetic data generation using distilabel (powers our new SFT dataset ', 'raw': '- Synthetic data generation using distilabel (powers our new SFT dataset '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HuggingFaceTB/smoltalk'}, 'url': 'https://huggingface.co/datasets/HuggingFaceTB/smoltalk', 'raw': 'https://huggingface.co/datasets/HuggingFaceTB/smoltalk'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Post-training scripts with TRL & the alignment handbook', 'raw': '- Post-training scripts with TRL & the alignment handbook'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- On-device tools with llama.cpp for summarization, rewriting & agents', 'raw': '- On-device tools with llama.cpp for summarization, rewriting & agents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Apache 2.0 licensed. V2 pre-training data mix coming soon!', 'raw': 'Apache 2.0 licensed. 
V2 pre-training data mix coming soon!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Which other tools should we add next?', 'raw': 'Which other tools should we add next?'}]","Making SmolLM2 reproducible: open-sourcing our training & evaluation toolkit 🛠️ https://github.com/huggingface/smollm/ + +- Pre-training code with nanotron +- Evaluation suite with lighteval +- Synthetic data generation using distilabel (powers our new SFT dataset https://huggingface.co/datasets/HuggingFaceTB/smoltalk) +- Post-training scripts with TRL & the alignment handbook +- On-device tools with llama.cpp for summarization, rewriting & agents + +Apache 2.0 licensed. V2 pre-training data mix coming soon! + +Which other tools should we add next?",[],[],"[{'reaction': '🔥', 'users': ['reach-vb', 'John6666', 'not-lain', 'AtAndDev', 'Joseph717171', 'vansin'], 'count': 6}, {'reaction': '🤗', 'users': ['Joseph717171'], 'count': 1}, {'reaction': '❤️', 'users': ['ron-wolf'], 'count': 1}]",2024-11-24 16:00:22,2024-11-24 16:01:53.795,[],/posts/loubnabnl/547206198374677,3712,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,415480285355699,"[{'type': 'text', 'value': 'anychat', 'raw': 'anychat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'supports chatgpt, gemini, perplexity, claude, meta llama, grok all in one app', 'raw': 'supports chatgpt, gemini, perplexity, claude, meta llama, grok all in one app'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'try it out there: ', 'raw': 'try it out there: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/akhaliq/anychat', 'raw': 'https://huggingface.co/spaces/akhaliq/anychat'}, {'type': 'new_line', 'raw': '\n'}]","anychat + +supports chatgpt, gemini, perplexity, claude, meta llama, grok all in one app + +try it out there: https://huggingface.co/spaces/akhaliq/anychat +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/qWdfkOtR78BTZZaQBRWfT.png'}]",[],"[{'reaction': '❤️', 'users': ['prithivMLmods', 'lou2191', 'matin-ebrahimkhani', 'tom755408', 'ltim', 'AtAndDev', 'ijohn07'], 'count': 7}, {'reaction': '🚀', 'users': ['AtAndDev', 'matin-ebrahimkhani', 'John6666', 'blessibrahim225'], 'count': 4}, {'reaction': '🔥', 'users': ['AtAndDev', 'matin-ebrahimkhani'], 'count': 2}]",2024-11-24 15:39:51,2024-11-24 15:46:32.640,[],/posts/akhaliq/415480285355699,3524,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857746553-5df7e9e5da6d0311fd3d53f9.jpeg,1173.0,Thomas Wolf,thomwolf,573286768557034,"[{'type': 'text', 'value': 'Interesting long read from ', 'raw': 'Interesting long read from '}, {'type': 'mention', 'user': 'evanmiller-anthropic', 'raw': '@evanmiller-anthropic'}, {'type': 'text', 'value': ' on having a better founded statistical approach to Language Model Evaluations:', 'raw': ' on having a better founded statistical approach to Language Model Evaluations:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.anthropic.com/research/statistical-approach-to-model-evals', 'raw': 'https://www.anthropic.com/research/statistical-approach-to-model-evals'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Worth a read if you're into LLM evaluations!"", 
'raw': ""Worth a read if you're into LLM evaluations!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cc ', 'raw': 'Cc '}, {'type': 'mention', 'user': 'clefourrier', 'raw': '@clefourrier'}]","Interesting long read from @evanmiller-anthropic on having a better founded statistical approach to Language Model Evaluations: +https://www.anthropic.com/research/statistical-approach-to-model-evals + +Worth a read if you're into LLM evaluations! + +Cc @clefourrier","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5df7e9e5da6d0311fd3d53f9/UOyX5evzJg2CVMd8xoqnb.png'}]","[{'_id': '6202a599216215a22221dea9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1644340617257-noauth.png', 'fullname': 'Clémentine Fourrier', 'name': 'clefourrier', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 641}, {'_id': '66a28b8a1105d2b9e8dae77e', 'avatarUrl': '/avatars/04591248ad3ace7b5f1122ecddc7efe8.svg', 'fullname': 'Evan Miller', 'name': 'evanmiller-anthropic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}]","[{'reaction': '🔥', 'users': ['reach-vb', 'tom755408', 'clefourrier', 'loubnabnl', 'guokan-shang'], 'count': 5}, {'reaction': '🧠', 'users': ['fuzzy-mittenz', 'John6666', 'clefourrier', 'Aurelien-Morgan'], 'count': 4}]",2024-11-24 15:29:59,2024-11-25 21:23:04.228,"[{'_id': '6744e54d857a7f7d7e1a3013', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/v0m73TQcgCq-CY1DQ1R2J.png', 'fullname': 'SpeedFire', 'name': 'SpeedFire', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/thomwolf/573286768557034,1829,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,454790754502988,"[{'type': 'text', 'value': 'For those who want to try out the new ', 'raw': 'For those who want to try out the new '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'black-forest-labs/FLUX.1-Redux-dev'}, 'url': 'https://huggingface.co/black-forest-labs/FLUX.1-Redux-dev', 'raw': 'https://huggingface.co/black-forest-labs/FLUX.1-Redux-dev'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can do this from my latest spaces ', 'raw': 'You can do this from my latest spaces '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MohamedRashad/Flux-Redux'}, 'url': 'https://huggingface.co/spaces/MohamedRashad/Flux-Redux', 'raw': 'https://huggingface.co/spaces/MohamedRashad/Flux-Redux'}]","For those who want to try out the new https://huggingface.co/black-forest-labs/FLUX.1-Redux-dev +You can do this from my latest spaces https://huggingface.co/spaces/MohamedRashad/Flux-Redux",[],[],"[{'reaction': '🔥', 'users': ['John6666'], 'count': 1}]",2024-11-24 14:03:43,2024-11-29 07:05:36.287,[],/posts/MohamedRashad/454790754502988,437,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d52e0c4e5642795617f668/ztXLrdFz3gkUJUIIQXfHo.png,56.0,Yanis L,Pendrokar,418420597611798,"[{'type': 'text', 'value': 'TTS: Sorry, I just cannot get the hype behind F5 TTS. 
It has now gathered a thousand votes in the TTS Arena fork and **has remained in #8 spot** against the _mostly_ Open TTS adversaries.', 'raw': 'TTS: Sorry, I just cannot get the hype behind F5 TTS. It has now gathered a thousand votes in the TTS Arena fork and **has remained in #8 spot** against the _mostly_ Open TTS adversaries.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The voice sample used is the same as XTTS. F5 has so far been unstable, being unemotional/monotone/depressed and mispronouncing words (_awestruck_).', 'raw': 'The voice sample used is the same as XTTS. F5 has so far been unstable, being unemotional/monotone/depressed and mispronouncing words (_awestruck_).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you have suggestions please give feedback in the following thread:', 'raw': 'If you have suggestions please give feedback in the following thread:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'mrfakename/E2-F5-TTS', 'discussionNum': 32}, 'url': 'https://huggingface.co/spaces/mrfakename/E2-F5-TTS/discussions/32', 'raw': 'https://huggingface.co/spaces/mrfakename/E2-F5-TTS/discussions/32'}]","TTS: Sorry, I just cannot get the hype behind F5 TTS. It has now gathered a thousand votes in the TTS Arena fork and **has remained in #8 spot** against the _mostly_ Open TTS adversaries. + +The voice sample used is the same as XTTS. F5 has so far been unstable, being unemotional/monotone/depressed and mispronouncing words (_awestruck_). + +If you have suggestions please give feedback in the following thread: +https://huggingface.co/spaces/mrfakename/E2-F5-TTS/discussions/32","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d52e0c4e5642795617f668/ZDVB0mKa7SNvAQ1xcGEPH.png'}]",[],"[{'reaction': '❤️', 'users': ['GHArt', 'jbot7984', 'linz'], 'count': 3}, {'reaction': '👍', 'users': ['GHArt', 'linz'], 'count': 2}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-24 11:25:35,2025-01-15 23:19:43.359,"[{'_id': '643b19f8a856622f978df30f', 'avatarUrl': '/avatars/c82779fdf94f80cdb5020504f83c818b.svg', 'fullname': 'Yatharth Sharma', 'name': 'YaTharThShaRma999', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 20, 'isFollowing': False}, {'_id': '63d52e0c4e5642795617f668', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d52e0c4e5642795617f668/ztXLrdFz3gkUJUIIQXfHo.png', 'fullname': 'Yanis L', 'name': 'Pendrokar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 56, 'isFollowing': False}, {'_id': '6629552c96f529a39bac7c89', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png', 'fullname': 'Hexgrad', 'name': 'hexgrad', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1048, 'isFollowing': False}]",/posts/Pendrokar/418420597611798,1086,"{'language': 'en', 'probability': 0.9148432016372681}",6 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png,159.0,Richard A Aragon,TuringsSolutions,319625675629271,"[{'type': 'text', 'value': ""Maybe that post I showed the other day with my Hyperbolic Embeddings getting to perfect loss with RAdam was a one-time fluke, bad test 
dataset, etc.? Anotha' one! I gave it a test set a PhD student would struggle with. This model is a bit more souped up. Major callouts of the model: High Dimensional Encoding (HDC), Hyperbolic Embeddings, Entropix. Link to the Colab Notebook: "", 'raw': ""Maybe that post I showed the other day with my Hyperbolic Embeddings getting to perfect loss with RAdam was a one-time fluke, bad test dataset, etc.? Anotha' one! I gave it a test set a PhD student would struggle with. This model is a bit more souped up. Major callouts of the model: High Dimensional Encoding (HDC), Hyperbolic Embeddings, Entropix. Link to the Colab Notebook: ""}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1mS-uxhufx-h7eZXL0ZwPMAAXHqSeGZxX?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1mS-uxhufx-h7eZXL0ZwPMAAXHqSeGZxX?usp=sharing'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Maybe that post I showed the other day with my Hyperbolic Embeddings getting to perfect loss with RAdam was a one-time fluke, bad test dataset, etc.? Anotha' one! I gave it a test set a PhD student would struggle with. This model is a bit more souped up. Major callouts of the model: High Dimensional Encoding (HDC), Hyperbolic Embeddings, Entropix. Link to the Colab Notebook: https://colab.research.google.com/drive/1mS-uxhufx-h7eZXL0ZwPMAAXHqSeGZxX?usp=sharing ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64274b69ba6cef0a6ebb0fd6/z_vgKFqgTyfz3LxuffCUB.png'}]",[],[],2024-11-24 08:53:52,2024-12-05 21:06:13.607,"[{'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}, {'_id': '64274b69ba6cef0a6ebb0fd6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png', 'fullname': 'Richard A Aragon', 'name': 'TuringsSolutions', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}]",/posts/TuringsSolutions/319625675629271,601,,26 https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg,77.0,Kenneth Hamilton,ZennyKenny,538076072707429,"[{'type': 'text', 'value': 'Using AI to teach English as a Foreign Language? EFL teachers often have busy schedules, variable class sizes, and unexpected cancellations. Introducing VocabSova: ', 'raw': 'Using AI to teach English as a Foreign Language? EFL teachers often have busy schedules, variable class sizes, and unexpected cancellations. 
Introducing VocabSova: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ZennyKenny/VocabSova'}, 'url': 'https://huggingface.co/spaces/ZennyKenny/VocabSova', 'raw': 'https://huggingface.co/spaces/ZennyKenny/VocabSova'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'VocabSova is a simple chatbot interface that helps teachers create topical vocabulary lists, custom worksheets using that vocabulary, and group activities on a defined theme for a specific English-speaking level (according to CEFR international standards).', 'raw': 'VocabSova is a simple chatbot interface that helps teachers create topical vocabulary lists, custom worksheets using that vocabulary, and group activities on a defined theme for a specific English-speaking level (according to CEFR international standards).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'There is a great use case for AI in nearly every field, and language learning is a particularly apt domain in my opinion. VocabSova is in active development during its Alpha release, all feedback welcome.', 'raw': 'There is a great use case for AI in nearly every field, and language learning is a particularly apt domain in my opinion. VocabSova is in active development during its Alpha release, all feedback welcome.'}]","Using AI to teach English as a Foreign Language? EFL teachers often have busy schedules, variable class sizes, and unexpected cancellations. Introducing VocabSova: https://huggingface.co/spaces/ZennyKenny/VocabSova + +VocabSova is a simple chatbot interface that helps teachers create topical vocabulary lists, custom worksheets using that vocabulary, and group activities on a defined theme for a specific English-speaking level (according to CEFR international standards). + +There is a great use case for AI in nearly every field, and language learning is a particularly apt domain in my opinion. VocabSova is in active development during its Alpha release, all feedback welcome.",[],[],"[{'reaction': '👀', 'users': ['John6666', 'dortizg'], 'count': 2}]",2024-11-24 04:48:41,2024-11-24 04:48:41.173,[],/posts/ZennyKenny/538076072707429,407,,0 https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,117573628010199,"[{'type': 'text', 'value': 'Good folks from ', 'raw': 'Good folks from '}, {'type': 'mention', 'user': 'amazon', 'raw': '@amazon'}, {'type': 'text', 'value': ', ', 'raw': ', '}, {'type': 'mention', 'user': 'Stanford', 'raw': '@Stanford'}, {'type': 'text', 'value': ', and other great institutions have released “A Comprehensive Survey of Hallucination Mitigation Techniques in Large Language Models!”', 'raw': ', and other great institutions have released “A Comprehensive Survey of Hallucination Mitigation Techniques in Large Language Models!”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This comprehensive survey examines over 32 cutting-edge techniques to combat hallucination in Large Language Models (LLMs). As LLMs become increasingly integral to our daily operations, addressing their tendency to generate ungrounded content is crucial.', 'raw': 'This comprehensive survey examines over 32 cutting-edge techniques to combat hallucination in Large Language Models (LLMs). 
As LLMs become increasingly integral to our daily operations, addressing their tendency to generate ungrounded content is crucial.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Retrieval-Augmented Generation (RAG) Innovations:', 'raw': 'Retrieval-Augmented Generation (RAG) Innovations:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Pre-generation retrieval using LLM-Augmenter with Plug-and-Play modules', 'raw': '- Pre-generation retrieval using LLM-Augmenter with Plug-and-Play modules'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Real-time verification through the EVER framework implementing three-stage validation', 'raw': '- Real-time verification through the EVER framework implementing three-stage validation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Post-generation refinement via the RARR system for automated attribution', 'raw': '- Post-generation refinement via the RARR system for automated attribution'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Advanced Decoding Strategies:', 'raw': 'Advanced Decoding Strategies:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Context-Aware Decoding (CAD) utilizing contrastive output distribution', 'raw': '- Context-Aware Decoding (CAD) utilizing contrastive output distribution'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- DoLa's innovative approach of contrasting logit differences between transformer layers"", 'raw': ""- DoLa's innovative approach of contrasting logit differences between transformer layers""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Knowledge Integration Methods:', 'raw': 'Knowledge Integration Methods:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The RHO framework leveraging entity representations and relation predicates', 'raw': '- The RHO framework leveraging entity representations and relation predicates'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- FLEEK's intelligent fact verification system using curated knowledge graphs"", 'raw': ""- FLEEK's intelligent fact verification system using curated knowledge graphs""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Novel Loss Functions:', 'raw': 'Novel Loss Functions:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Text Hallucination Regularization (THR) derived from mutual information', 'raw': '- Text Hallucination Regularization (THR) derived from mutual information'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The mFACT metric for evaluating faithfulness in multilingual contexts', 'raw': '- The mFACT metric for evaluating faithfulness in multilingual contexts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This research provides a structured taxonomy for categorizing these mitigation techniques, offering valuable insights for practitioners and researchers working with LLMs.', 'raw': 'This research provides a structured taxonomy for categorizing these mitigation techniques, offering valuable insights for practitioners and researchers working with LLMs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What are your thoughts on hallucination mitigation in LLMs?', 'raw': 'What are 
your thoughts on hallucination mitigation in LLMs?'}]","Good folks from @amazon, @Stanford, and other great institutions have released “A Comprehensive Survey of Hallucination Mitigation Techniques in Large Language Models!” + +This comprehensive survey examines over 32 cutting-edge techniques to combat hallucination in Large Language Models (LLMs). As LLMs become increasingly integral to our daily operations, addressing their tendency to generate ungrounded content is crucial. + +Retrieval-Augmented Generation (RAG) Innovations: +- Pre-generation retrieval using LLM-Augmenter with Plug-and-Play modules +- Real-time verification through the EVER framework implementing three-stage validation +- Post-generation refinement via the RARR system for automated attribution + +Advanced Decoding Strategies: +- Context-Aware Decoding (CAD) utilizing contrastive output distribution +- DoLa's innovative approach of contrasting logit differences between transformer layers + +Knowledge Integration Methods: +- The RHO framework leveraging entity representations and relation predicates +- FLEEK's intelligent fact verification system using curated knowledge graphs + +Novel Loss Functions: +- Text Hallucination Regularization (THR) derived from mutual information +- The mFACT metric for evaluating faithfulness in multilingual contexts + +This research provides a structured taxonomy for categorizing these mitigation techniques, offering valuable insights for practitioners and researchers working with LLMs. + +What are your thoughts on hallucination mitigation in LLMs?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/k5RoT0IOOG9erjBWJVLOj.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-24 02:45:02,2024-11-24 14:55:57.726,[],/posts/singhsidhukuldeep/117573628010199,375,,1 +/avatars/4941f9461c77bb5c5c0b5ec9a6f9efed.svg,76.0,az,xiaozaa,378209596329028,"[{'type': 'text', 'value': 'Release a new virtual tryon flux fill finetuning model. Try it here. ', 'raw': 'Release a new virtual tryon flux fill finetuning model. Try it here. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'xiaozaa/catvton-flux-alpha'}, 'url': 'https://huggingface.co/xiaozaa/catvton-flux-alpha', 'raw': 'https://huggingface.co/xiaozaa/catvton-flux-alpha'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Release a new virtual tryon flux fill finetuning model. Try it here. 
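The decoding-side ideas in the hallucination survey post above are easy to prototype. Below is a minimal sketch of the layer-contrast trick that DoLa is described as using (contrasting logits early-exited from an intermediate transformer layer against the final layer). It uses GPT-2 purely for illustration, fixes the early layer by hand, and omits DoLa's plausibility masking and dynamic layer selection, so this is a sketch of the idea, not the authors' implementation.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2").eval()

inputs = tok("The capital of France is", return_tensors="pt")
with torch.no_grad():
    out = model(**inputs, output_hidden_states=True)

# "Mature" next-token logits from the final layer (the model's normal head output).
logits_mature = out.logits[:, -1]

# "Premature" logits: early-exit layer 6 of 12 (an arbitrary choice here)
# through the same final layer norm and LM head.
early_hidden = out.hidden_states[6][:, -1]
logits_premature = model.lm_head(model.transformer.ln_f(early_hidden))

# DoLa-style contrast: favor tokens whose evidence emerges in deeper layers.
contrast = torch.log_softmax(logits_mature, -1) - torch.log_softmax(logits_premature, -1)
print(tok.decode(contrast.argmax(-1).item()))
```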
+ +https://huggingface.co/xiaozaa/catvton-flux-alpha + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/75K6VNDnzDG9ihG18C2Ux.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/mXe4GpJHNwxax0maUHf9j.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/eBz7WssbJyWzGURAP_FyI.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/yHUR1ZFr25YYFOIhZ7xIm.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6401517a8ba76abe4b72b2bf/O3Vj2Em-dB7ECbIYeQNvv.png'}]",[],"[{'reaction': '👍', 'users': ['imrankhakwani', 'John6666', 'nicholascao', 'PotatoEnrage', 'swkwon', '1234aurel'], 'count': 6}, {'reaction': '🔥', 'users': ['omarei'], 'count': 1}]",2024-11-24 02:01:24,2025-01-30 09:52:31.327,"[{'_id': '65ddb02aa8bdde3bab07eadd', 'avatarUrl': '/avatars/b0b77820429988896271facf98abad70.svg', 'fullname': 'xiaozhu zhong', 'name': 'tranadooo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '679b4665acf7c1814a36657d', 'avatarUrl': '/avatars/537d6d0b9cbcb6ee264e44240eff0577.svg', 'fullname': 'Manaswini', 'name': 'manaswini13reddy', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/xiaozaa/378209596329028,2389,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg,69.0,Csaba Kecskemeti,csabakecskemeti,834919494324436,"[{'type': 'text', 'value': 'Repurposed my older AI workstation to a homelab server, it has received 2xV100 + 1xP40', 'raw': 'Repurposed my older AI workstation to a homelab server, it has received 2xV100 + 1xP40'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I can reach huge 210k token context size with MegaBeam-Mistral-7B-512k-GGUF ~70+tok/s, or run Llama-3.1-Nemotron-70B-Instruct-HF-GGUF with 50k Context ~10tok/s (V100 only 40k ctx and 15tok/s).', 'raw': 'I can reach huge 210k token context size with MegaBeam-Mistral-7B-512k-GGUF ~70+tok/s, or run Llama-3.1-Nemotron-70B-Instruct-HF-GGUF with 50k Context ~10tok/s (V100 only 40k ctx and 15tok/s).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also able to Lora finetune with similar performace as an RTX3090.', 'raw': 'Also able to Lora finetune with similar performace as an RTX3090.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It moved to the garage to no complaints for the noise from the family. Will move to a Rack soon :D', 'raw': 'It moved to the garage to no complaints for the noise from the family. Will move to a Rack soon :D'}, {'type': 'new_line', 'raw': '\n'}]","Repurposed my older AI workstation to a homelab server, it has received 2xV100 + 1xP40 +I can reach huge 210k token context size with MegaBeam-Mistral-7B-512k-GGUF ~70+tok/s, or run Llama-3.1-Nemotron-70B-Instruct-HF-GGUF with 50k Context ~10tok/s (V100 only 40k ctx and 15tok/s). +Also able to Lora finetune with similar performace as an RTX3090. +It moved to the garage to no complaints for the noise from the family. 
Will move to a Rack soon :D +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e6d37e02dee9bcb9d9fa18/9RauPJdJLt2gDQ081Udxw.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e6d37e02dee9bcb9d9fa18/34pYYbQd46L04JUC4ZVJ6.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['John6666', 'Rasta02'], 'count': 2}]",2024-11-24 00:04:02,2024-11-24 04:31:05.379,"[{'_id': '674282b725766db587badb84', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/6aIIy7eNLLjepZfp3Aym3.png', 'fullname': 'George M', 'name': 'ge-or-ge', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '64e6d37e02dee9bcb9d9fa18', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg', 'fullname': 'Csaba Kecskemeti', 'name': 'csabakecskemeti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 69, 'isFollowing': False}]",/posts/csabakecskemeti/834919494324436,308,,2 https://cdn-avatars.huggingface.co/v1/production/uploads/1667002643224-604a5184dca2c7ac7508b849.jpeg,287.0,Ross Wightman,rwightman,521561593383165,"[{'type': 'text', 'value': 'Want to validate some hparams or figure out what ', 'raw': 'Want to validate some hparams or figure out what '}, {'type': 'inline_code', 'code': 'timm', 'raw': '`timm`'}, {'type': 'text', 'value': ' model to use before committing to download or training with a large dataset? Try mini-imagenet: ', 'raw': ' model to use before committing to download or training with a large dataset? Try mini-imagenet: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'timm/mini-imagenet'}, 'url': 'https://huggingface.co/datasets/timm/mini-imagenet', 'raw': 'https://huggingface.co/datasets/timm/mini-imagenet'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I had this sitting on my drive and forgot where I pulled it together from. It's 100 classes of imagenet, 50k train and 10k val images (from ImageNet-1k train set), and 5k test images (from ImageNet-1k val set). 7.4GB instead of > 100GB for the full ImageNet-1k. This ver is not reduced resolution like some other 'mini' versions. Super easy to use with timm train/val scripts, check out the dataset card."", 'raw': ""I had this sitting on my drive and forgot where I pulled it together from. It's 100 classes of imagenet, 50k train and 10k val images (from ImageNet-1k train set), and 5k test images (from ImageNet-1k val set). 7.4GB instead of > 100GB for the full ImageNet-1k. This ver is not reduced resolution like some other 'mini' versions. 
Super easy to use with timm train/val scripts, check out the dataset card.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I often check fine-tuning with even smaller datasets like:', 'raw': 'I often check fine-tuning with even smaller datasets like:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' * ', 'raw': ' * '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'timm/resisc45'}, 'url': 'https://huggingface.co/datasets/timm/resisc45', 'raw': 'https://huggingface.co/datasets/timm/resisc45'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' * ', 'raw': ' * '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'timm/oxford-iiit-pet'}, 'url': 'https://huggingface.co/datasets/timm/oxford-iiit-pet', 'raw': 'https://huggingface.co/datasets/timm/oxford-iiit-pet'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But those are a bit small to train any modest size model w/o starting from pretrained weights. ', 'raw': 'But those are a bit small to train any modest size model w/o starting from pretrained weights. '}]","Want to validate some hparams or figure out what `timm` model to use before committing to download or training with a large dataset? Try mini-imagenet: https://huggingface.co/datasets/timm/mini-imagenet + +I had this sitting on my drive and forgot where I pulled it together from. It's 100 classes of imagenet, 50k train and 10k val images (from ImageNet-1k train set), and 5k test images (from ImageNet-1k val set). 7.4GB instead of > 100GB for the full ImageNet-1k. This ver is not reduced resolution like some other 'mini' versions. Super easy to use with timm train/val scripts, check out the dataset card. + +I often check fine-tuning with even smaller datasets like: + * https://huggingface.co/datasets/timm/resisc45 + * https://huggingface.co/datasets/timm/oxford-iiit-pet +But those are a bit small to train any modest size model w/o starting from pretrained weights. ",[],[],"[{'reaction': '🚀', 'users': ['bryant1410', 'John6666', 'byoussef', 'davanstrien', 'clem'], 'count': 5}]",2024-11-20 23:40:27,2024-11-20 23:42:22.958,[],/posts/rwightman/521561593383165,1085,,0 https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,234016029667356,"[{'type': 'text', 'value': '🚀 DeepSeek just dropped DeepSeek-R1-Lite-Preview with “reasoning” capacity. ', 'raw': '🚀 DeepSeek just dropped DeepSeek-R1-Lite-Preview with “reasoning” capacity. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Matches OpenAI o1-preview on AIME & MATH benchmarks.', 'raw': '- Matches OpenAI o1-preview on AIME & MATH benchmarks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Transparent process output', 'raw': '- Transparent process output'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Open-source model to be released', 'raw': '- Open-source model to be released'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out: ', 'raw': 'Try it out: '}, {'type': 'link', 'href': 'https://chat.deepseek.com/', 'raw': 'https://chat.deepseek.com/'}]","🚀 DeepSeek just dropped DeepSeek-R1-Lite-Preview with “reasoning” capacity. + +- Matches OpenAI o1-preview on AIME & MATH benchmarks. 
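For the mini-imagenet post above, a quick smoke-test before committing to the full ImageNet-1k download might look like the following. The `validation` split name and the `image` column are assumptions about the dataset layout; the dataset card documents the exact usage with the timm train/val scripts.

```python
# pip install timm datasets
import timm
import torch
from datasets import load_dataset

ds = load_dataset("timm/mini-imagenet", split="validation")  # split name assumed

model = timm.create_model("resnet18", pretrained=False, num_classes=100).eval()
cfg = timm.data.resolve_data_config({}, model=model)
transform = timm.data.create_transform(**cfg)

x = transform(ds[0]["image"].convert("RGB")).unsqueeze(0)  # "image" column assumed
with torch.no_grad():
    logits = model(x)
print(logits.shape)  # torch.Size([1, 100]): one logit per mini-imagenet class
```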
+- Transparent process output +- Open-source model to be released + +Try it out: https://chat.deepseek.com/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/yk1CjBQVwfy2bDrxUd6Ls.jpeg'}]",[],"[{'reaction': '👍', 'users': ['devops724', 'clem', 'MustaphaLargou25', 'OmbelineM', 'Gl2e3n'], 'count': 5}, {'reaction': '👀', 'users': ['John6666', 'davanstrien', 'yuchenxie', 'clem'], 'count': 4}, {'reaction': '🤯', 'users': ['yuchenxie', 'clem', 'Syrus2'], 'count': 3}]",2024-11-20 22:49:41,2024-11-20 22:49:41.473,[],/posts/fdaudens/234016029667356,2833,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1676555600618-noauth.jpeg,1.0,Martin Gubri,mgubri,873720319675748,"[{'type': 'text', 'value': '🎉 We’re excited to announce, in collaboration with ', 'raw': '🎉 We’re excited to announce, in collaboration with '}, {'type': 'mention', 'user': 'kaleidophon', 'raw': '@kaleidophon'}, {'type': 'text', 'value': ' , the release of the models from our Apricot 🍑 paper, ""Apricot: Calibrating Large Language Models Using Their Generations Only,"" accepted at ACL 2024! Reproducibility is essential in science, and we\'ve worked hard to make it as seamless as possible.', 'raw': ' , the release of the models from our Apricot 🍑 paper, ""Apricot: Calibrating Large Language Models Using Their Generations Only,"" accepted at ACL 2024! Reproducibility is essential in science, and we\'ve worked hard to make it as seamless as possible.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'parameterlab/apricot-models-673d2cae40b6ff437a86f0bf'}, 'url': 'https://huggingface.co/collections/parameterlab/apricot-models-673d2cae40b6ff437a86f0bf', 'raw': 'https://huggingface.co/collections/parameterlab/apricot-models-673d2cae40b6ff437a86f0bf'}]","🎉 We’re excited to announce, in collaboration with @kaleidophon , the release of the models from our Apricot 🍑 paper, ""Apricot: Calibrating Large Language Models Using Their Generations Only,"" accepted at ACL 2024! Reproducibility is essential in science, and we've worked hard to make it as seamless as possible. +https://huggingface.co/collections/parameterlab/apricot-models-673d2cae40b6ff437a86f0bf",[],"[{'_id': '6389e86a92ad533329bf00fe', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1669982276443-noauth.jpeg', 'fullname': 'Dennis Ulmer', 'name': 'kaleidophon', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6}]","[{'reaction': '🔥', 'users': ['Sri-Vigneshwar-DJ', 'John6666', 'kaleidophon', 'clem'], 'count': 4}]",2024-11-20 20:00:14,2024-11-20 20:00:14.591,[],/posts/mgubri/873720319675748,933,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d50e9ef9cbfa798c590004/FlVe8chafigMfrPpMeJRL.jpeg,133.0,Jared Sulzdorf,jsulz,770595143636260,"[{'type': 'text', 'value': 'When the XetHub crew joined Hugging Face this fall, ', 'raw': 'When the XetHub crew joined Hugging Face this fall, '}, {'type': 'mention', 'user': 'erinys', 'raw': '@erinys'}, {'type': 'text', 'value': ' and I started brainstorming how to share our work to replace Git LFS on the Hub. Uploading and downloading large models and datasets takes precious time. That’s where our chunk-based approach comes in.', 'raw': ' and I started brainstorming how to share our work to replace Git LFS on the Hub. Uploading and downloading large models and datasets takes precious time. 
That’s where our chunk-based approach comes in.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instead of versioning files (like Git and Git LFS), we version variable-sized chunks of data. For the Hugging Face community, this means:', 'raw': 'Instead of versioning files (like Git and Git LFS), we version variable-sized chunks of data. For the Hugging Face community, this means:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⏩ Only upload the chunks that changed.', 'raw': '⏩ Only upload the chunks that changed.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Download just the updates, not the whole file.', 'raw': '🚀 Download just the updates, not the whole file.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 We store your file as deduplicated chunks', 'raw': '🧠 We store your file as deduplicated chunks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In our benchmarks, we found that using CDC to store iterative model and dataset versions led to transfer speedups of ~2x, but this isn’t just a performance boost. It’s a rethinking of how we manage models and datasets on the Hub.', 'raw': 'In our benchmarks, we found that using CDC to store iterative model and dataset versions led to transfer speedups of ~2x, but this isn’t just a performance boost. It’s a rethinking of how we manage models and datasets on the Hub.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're planning to bring our new storage backend to the Hub in early 2025 - check out our blog to dive deeper, and let us know: how could this improve your workflows?"", 'raw': ""We're planning to bring our new storage backend to the Hub in early 2025 - check out our blog to dive deeper, and let us know: how could this improve your workflows?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/from-files-to-chunks', 'raw': 'https://huggingface.co/blog/from-files-to-chunks'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","When the XetHub crew joined Hugging Face this fall, @erinys and I started brainstorming how to share our work to replace Git LFS on the Hub. Uploading and downloading large models and datasets takes precious time. That’s where our chunk-based approach comes in. + +Instead of versioning files (like Git and Git LFS), we version variable-sized chunks of data. For the Hugging Face community, this means: + +⏩ Only upload the chunks that changed. +🚀 Download just the updates, not the whole file. +🧠 We store your file as deduplicated chunks + +In our benchmarks, we found that using CDC to store iterative model and dataset versions led to transfer speedups of ~2x, but this isn’t just a performance boost. It’s a rethinking of how we manage models and datasets on the Hub. + +We're planning to bring our new storage backend to the Hub in early 2025 - check out our blog to dive deeper, and let us know: how could this improve your workflows?
+ +https://huggingface.co/blog/from-files-to-chunks ",[],"[{'_id': '66b05ca6e7c57eac7cafbbc4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66b05ca6e7c57eac7cafbbc4/nddUkS3xu78cxCS-r7-xB.jpeg', 'fullname': 'Ann Huang', 'name': 'erinys', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 36}]","[{'reaction': '🔥', 'users': ['assafvayner', 'andrewrreed', 'garrethlee', 'Sri-Vigneshwar-DJ', 'port8080', 'victor', 'shawon', 'John6666', 'Joseph717171', 'julien-c', 'davidberenstein1957', 'Norod78', 'johnlockejrr', 'davanstrien', 'erinys', 'BrigitteTousi', 'clem', 'Dref360', 'ZennyKenny'], 'count': 19}, {'reaction': '❤️', 'users': ['assafvayner', 'andrewrreed', 'Joseph717171', 'julien-c', 'davidberenstein1957', 'BrigitteTousi', 'clem', 'Dref360'], 'count': 8}, {'reaction': '🧠', 'users': ['assafvayner', 'ArthurZ', 'Joseph717171', 'davidberenstein1957', 'BrigitteTousi', 'clem', 'Dref360'], 'count': 7}]",2024-11-20 19:20:17,2024-11-20 19:20:33.190,[],/posts/jsulz/770595143636260,2985,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64808a8c856901b0edb96245/UVa3ztQ8DRM47S8Rsk4Rz.jpeg,20.0,John Johnson,jjokah,108642255859843,"[{'type': 'text', 'value': ""Google's revamped Machine Learning Crash Course covers the recent advances in AI, with an increased focus on interactive learning. "", 'raw': ""Google's revamped Machine Learning Crash Course covers the recent advances in AI, with an increased focus on interactive learning. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 100+ exercises', 'raw': '📝 100+ exercises'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗂 12 modules', 'raw': '🗂 12 modules'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🕒 15 hours', 'raw': '🕒 15 hours'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📹 Video explainers of ML concepts', 'raw': '📹 Video explainers of ML concepts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌎 Real-world examples', 'raw': '🌎 Real-world examples'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Interactive visualizations', 'raw': '📊 Interactive visualizations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ref:', 'raw': 'Ref:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://developers.google.com/machine-learning/crash-course', 'raw': 'https://developers.google.com/machine-learning/crash-course'}]","Google's revamped Machine Learning Crash Course covers the recent advances in AI, with an increased focus on interactive learning. 
+ +📝 100+ exercises +🗂 12 modules +🕒 15 hours +📹 Video explainers of ML concepts +🌎 Real-world examples +📊 Interactive visualizations + +Ref: +https://developers.google.com/machine-learning/crash-course","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64808a8c856901b0edb96245/ejAM9a27e_l4JvbfSPeu1.png'}]",[],"[{'reaction': '👍', 'users': ['Jayachandran1', 'John6666', 'jjokah', 'daniel-ltw'], 'count': 4}, {'reaction': '🔥', 'users': ['Shahrokhpk'], 'count': 1}]",2024-11-20 17:43:10,2024-11-20 17:43:10.202,[],/posts/jjokah/108642255859843,795,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,621479972109566,"[{'type': 'text', 'value': '🎵 Introducing Tamago Music Dataset - ', 'raw': '🎵 Introducing Tamago Music Dataset - '}, {'type': 'link', 'href': 'https://huggingface.co/datasets/nyuuzyou/tamago', 'raw': 'https://huggingface.co/datasets/nyuuzyou/tamago'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A collection of 1,567 music tracks featuring:', 'raw': 'A collection of 1,567 music tracks featuring:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Complete metadata with audio files and cover artwork', 'raw': '- Complete metadata with audio files and cover artwork'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Rich track information including titles, descriptions, and genres', 'raw': '- Rich track information including titles, descriptions, and genres'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- User engagement metrics like play counts and reactions', 'raw': '- User engagement metrics like play counts and reactions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- English language content from independent artists', 'raw': '- English language content from independent artists'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Released under Creative Commons Zero (CC0) license', 'raw': '- Released under Creative Commons Zero (CC0) license'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset structure includes:', 'raw': 'Dataset structure includes:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Track metadata (titles, descriptions, genres, tags)', 'raw': '- Track metadata (titles, descriptions, genres, tags)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Associated media (audio files, cover images)', 'raw': '- Associated media (audio files, cover images)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Artist information and engagement metrics', 'raw': '- Artist information and engagement metrics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Particularly valuable for:', 'raw': 'Particularly valuable for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Music generation model training', 'raw': '- Music generation model training'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cross-modal analysis', 'raw': '- Cross-modal analysis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Audio classification tasks', 'raw': '- Audio classification tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Music style and genre analysis', 'raw': '- Music 
style and genre analysis'}]","🎵 Introducing Tamago Music Dataset - https://huggingface.co/datasets/nyuuzyou/tamago + +A collection of 1,567 music tracks featuring: + +- Complete metadata with audio files and cover artwork +- Rich track information including titles, descriptions, and genres +- User engagement metrics like play counts and reactions +- English language content from independent artists +- Released under Creative Commons Zero (CC0) license + +Dataset structure includes: +- Track metadata (titles, descriptions, genres, tags) +- Associated media (audio files, cover images) +- Artist information and engagement metrics + +Particularly valuable for: +- Music generation model training +- Cross-modal analysis +- Audio classification tasks +- Music style and genre analysis",[],[],"[{'reaction': '🔥', 'users': ['John6666'], 'count': 1}]",2024-11-20 17:19:47,2024-11-20 17:19:47.817,[],/posts/nyuuzyou/621479972109566,315,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/642827944fe87caede802784/a7s3Ub9Cy6-PuuaX8wwXm.png,83.0,VILARIN,vilarin,726262132598364,"[{'type': 'text', 'value': '🏄\u200d♂️While browsing new models, I stumbled upon Lumiere from aixonlab. After testing it, I feel it has considerable potential. Keep up the good work!', 'raw': '🏄\u200d♂️While browsing new models, I stumbled upon Lumiere from aixonlab. After testing it, I feel it has considerable potential. Keep up the good work!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lumiere Alpha is a model focusing on improving realism without compromising prompt coherency or changing the composition completely from the original Flux.1-Dev model.', 'raw': 'Lumiere Alpha is a model focusing on improving realism without compromising prompt coherency or changing the composition completely from the original Flux.1-Dev model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦄 Model: ', 'raw': '🦄 Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'aixonlab/flux.1-lumiere-alpha'}, 'url': 'https://huggingface.co/aixonlab/flux.1-lumiere-alpha', 'raw': 'https://huggingface.co/aixonlab/flux.1-lumiere-alpha'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦖 Demo: ', 'raw': '🦖 Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'vilarin/lumiere'}, 'url': 'https://huggingface.co/spaces/vilarin/lumiere', 'raw': 'https://huggingface.co/spaces/vilarin/lumiere'}]","🏄‍♂️While browsing new models, I stumbled upon Lumiere from aixonlab. After testing it, I feel it has considerable potential. Keep up the good work! + +Lumiere Alpha is a model focusing on improving realism without compromising prompt coherency or changing the composition completely from the original Flux.1-Dev model. 
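A minimal sketch of loading the Tamago dataset described above with `datasets`; the `train` split name is an assumption, and the exact column names (titles, genres, tags, and so on) should be checked against the dataset card rather than taken from this snippet.

```python
from datasets import load_dataset

tamago = load_dataset("nyuuzyou/tamago", split="train")  # split name assumed
print(tamago)  # column names and row count
row = tamago[0]
print({k: type(v).__name__ for k, v in row.items()})  # inspect the schema first
```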
+ +🦄 Model: https://huggingface.co/aixonlab/flux.1-lumiere-alpha + +🦖 Demo: https://huggingface.co/spaces/vilarin/lumiere","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/642827944fe87caede802784/-Tbr9Wk5k4f0bFxYrXRMb.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/642827944fe87caede802784/cCFpUzPiXjHOvTgS1vaXu.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/642827944fe87caede802784/hzrwDsmKlQDVkVJuPz_XZ.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/642827944fe87caede802784/G8RKshOWIjxDYyiB9Q25J.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/642827944fe87caede802784/-okzntroIdAkDFq924TNf.png'}]",[],"[{'reaction': '🔥', 'users': ['orrinin', 'BrigitteTousi', 'Sri-Vigneshwar-DJ', 'hitchhiker3010', 'John6666'], 'count': 5}]",2024-11-20 17:08:36,2025-04-29 11:43:52.983,"[{'_id': '64282d3deb2891d3746a1f1e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64282d3deb2891d3746a1f1e/V7xBCMfcShiMTjjJYaJBv.png', 'fullname': 'orrin', 'name': 'orrinin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/vilarin/726262132598364,1424,,2 +/avatars/cf21cf2c8f1c9d5a8fb35761acdef04b.svg,46.0,Emin Temiz,etemiz,544410447143869,"[{'type': 'text', 'value': 'if I host in hf spaces, can I interact with the app using an API?', 'raw': 'if I host in hf spaces, can I interact with the app using an API?'}]","if I host in hf spaces, can I interact with the app using an API?",[],[],"[{'reaction': '🔥', 'users': ['Clausss', 'Sri-Vigneshwar-DJ'], 'count': 2}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-20 14:37:33,2024-11-20 18:20:49.356,"[{'_id': '64808a8c856901b0edb96245', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64808a8c856901b0edb96245/UVa3ztQ8DRM47S8Rsk4Rz.jpeg', 'fullname': 'John Johnson', 'name': 'jjokah', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 20, 'isFollowing': False}]",/posts/etemiz/544410447143869,974,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,595748928581589,"[{'type': 'text', 'value': ""🍅 Glif App's Remixes feature allows you to slap a logo onto anything, seamlessly integrating the input image (logo) into various contexts. The result is stunning remixes that blend the input logo with generated images (img2img logo mapping) for incredible outcomes. "", 'raw': ""🍅 Glif App's Remixes feature allows you to slap a logo onto anything, seamlessly integrating the input image (logo) into various contexts. The result is stunning remixes that blend the input logo with generated images (img2img logo mapping) for incredible outcomes. 
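On the Spaces API question above: Gradio-based Spaces do expose their endpoints programmatically through the `gradio_client` package. A minimal sketch, where the Space id and the endpoint name are placeholders to be replaced with your own:

```python
# pip install gradio_client
from gradio_client import Client

client = Client("your-username/your-space")  # placeholder Space id
client.view_api()                            # prints the endpoints the app exposes

result = client.predict("Hello!", api_name="/predict")  # endpoint name varies per app
print(result)
```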
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out Any Logo Anywhere remixes on Glif: [Glif Remixes](', 'raw': 'Check out Any Logo Anywhere remixes on Glif: [Glif Remixes]('}, {'type': 'link', 'href': 'https://glif.app/glifs/cm3o7dfsd002610z48sz89yih/remixes', 'raw': 'https://glif.app/glifs/cm3o7dfsd002610z48sz89yih/remixes'}, {'type': 'text', 'value': ') ', 'raw': ') '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐The browser extension enables thousands of Glif-based img2img workflows on any image you find online. Experience Glif Remix with WebAI: [Chrome Extension](', 'raw': '🌐The browser extension enables thousands of Glif-based img2img workflows on any image you find online. Experience Glif Remix with WebAI: [Chrome Extension]('}, {'type': 'link', 'href': 'https://chromewebstore.google.com/detail/glif-remix-the-web-with-a/abfbooehhdjcgmbmcpkcebcmpfnlingo', 'raw': 'https://chromewebstore.google.com/detail/glif-remix-the-web-with-a/abfbooehhdjcgmbmcpkcebcmpfnlingo'}, {'type': 'text', 'value': ') ', 'raw': ') '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗Have fun with the cool stuff !!', 'raw': '🤗Have fun with the cool stuff !!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'prithivMLmods', 'raw': '@prithivMLmods'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","🍅 Glif App's Remixes feature allows you to slap a logo onto anything, seamlessly integrating the input image (logo) into various contexts. The result is stunning remixes that blend the input logo with generated images (img2img logo mapping) for incredible outcomes. + +Check out Any Logo Anywhere remixes on Glif: [Glif Remixes](https://glif.app/glifs/cm3o7dfsd002610z48sz89yih/remixes) + +🌐The browser extension enables thousands of Glif-based img2img workflows on any image you find online. Experience Glif Remix with WebAI: [Chrome Extension](https://chromewebstore.google.com/detail/glif-remix-the-web-with-a/abfbooehhdjcgmbmcpkcebcmpfnlingo) + +. +. +. +🤗Have fun with the cool stuff !! 
+@prithivMLmods +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/2R9bl_5FFJu1ClPW72odV.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/NUJGsiKV6fNrRXFno_Jlk.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/XrSXtfjWLuweVyBQUlOoU.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/jVdmJYsG1u5iARK0viMiZ.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/XfsydXC9ryOhkHAUPlTW1.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/UN2NCqeMS9_YHQ-oddvsn.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/mK1Z7dQ4trCcsIDqZCQME.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/ks-Hx1DXdO6VPmzUA3y4U.jpeg'}]","[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957}]","[{'reaction': '🤗', 'users': ['dvilasuero', 'Mrdesigner14', 'BrigitteTousi', 'victor'], 'count': 4}, {'reaction': '❤️', 'users': ['dvilasuero', 'Mrdesigner14', 'BrigitteTousi'], 'count': 3}, {'reaction': '👍', 'users': ['Mrdesigner14', 'BrigitteTousi'], 'count': 2}, {'reaction': '👀', 'users': ['Mrdesigner14', 'John6666'], 'count': 2}, {'reaction': '🔥', 'users': ['regergregre54545'], 'count': 1}]",2024-11-20 14:37:26,2024-11-20 14:41:10.233,[],/posts/prithivMLmods/595748928581589,2064,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,985768918496418,"[{'type': 'text', 'value': '🔍 Meta teams use a fine-tuned Llama model to fix production issues in seconds', 'raw': '🔍 Meta teams use a fine-tuned Llama model to fix production issues in seconds'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""One of Meta's engineering teams shared how they use a fine-tuned small Llama (Llama-2-7B, so not even a very recent model) to identify the root cause of production issues with 42% accuracy."", 'raw': ""One of Meta's engineering teams shared how they use a fine-tuned small Llama (Llama-2-7B, so not even a very recent model) to identify the root cause of production issues with 42% accuracy.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤔 42%, is that not too low?', 'raw': '🤔 42%, is that not too low?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""➡️ Usually, whenever there's an issue in production, engineers dive into recent code changes to find the offending commit. At Meta's scale (thousands of daily changes), this is like finding a needle in a haystack."", 'raw': ""➡️ Usually, whenever there's an issue in production, engineers dive into recent code changes to find the offending commit. 
At Meta's scale (thousands of daily changes), this is like finding a needle in a haystack.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 So when the LLM-based suggestion is right, it cuts incident resolution time from hours to seconds!', 'raw': '💡 So when the LLM-based suggestion is right, it cuts incident resolution time from hours to seconds!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How did they do it?', 'raw': 'How did they do it?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔄 Two-step approach:', 'raw': '🔄 Two-step approach:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Heuristics (code ownership, directory structure, runtime graphs) reduce thousands of potential changes to a manageable set', 'raw': '‣ Heuristics (code ownership, directory structure, runtime graphs) reduce thousands of potential changes to a manageable set'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Fine-tuned Llama 2 7B ranks the most likely culprits', 'raw': '‣ Fine-tuned Llama 2 7B ranks the most likely culprits'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎓 Training pipeline:', 'raw': '🎓 Training pipeline:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""‣ Continued pre-training on Meta's internal docs and wikis"", 'raw': ""‣ Continued pre-training on Meta's internal docs and wikis""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Supervised fine-tuning on past incident investigations', 'raw': '‣ Supervised fine-tuning on past incident investigations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Training data mimicked real-world constraints (2-20 potential changes per incident)', 'raw': '‣ Training data mimicked real-world constraints (2-20 potential changes per incident)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔮 Now future developments await:', 'raw': '🔮 Now future developments await:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Language models could handle more of the incident response workflow (runbooks, mitigation, post-mortems)', 'raw': '‣ Language models could handle more of the incident response workflow (runbooks, mitigation, post-mortems)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Improvements in model reasoning should boost accuracy further', 'raw': '‣ Improvements in model reasoning should boost accuracy further'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read it in full 👉 ', 'raw': 'Read it in full 👉 '}, {'type': 'link', 'href': 'https://www.tryparity.com/blog/how-meta-uses-llms-to-improve-incident-response', 'raw': 'https://www.tryparity.com/blog/how-meta-uses-llms-to-improve-incident-response'}]","🔍 Meta teams use a fine-tuned Llama model to fix production issues in seconds + +One of Meta's engineering teams shared how they use a fine-tuned small Llama (Llama-2-7B, so not even a very recent model) to identify the root cause of production issues with 42% accuracy. + +🤔 42%, is that not too low? +➡️ Usually, whenever there's an issue in production, engineers dive into recent code changes to find the offending commit. At Meta's scale (thousands of daily changes), this is like finding a needle in a haystack. 
+💡 So when the LLM-based suggestion is right, it cuts incident resolution time from hours to seconds!
+
+How did they do it?
+
+🔄 Two-step approach:
+‣ Heuristics (code ownership, directory structure, runtime graphs) reduce thousands of potential changes to a manageable set
+‣ Fine-tuned Llama 2 7B ranks the most likely culprits
+
+🎓 Training pipeline:
+‣ Continued pre-training on Meta's internal docs and wikis
+‣ Supervised fine-tuning on past incident investigations
+‣ Training data mimicked real-world constraints (2-20 potential changes per incident)
+
+🔮 Now future developments await:
+‣ Language models could handle more of the incident response workflow (runbooks, mitigation, post-mortems)
+‣ Improvements in model reasoning should boost accuracy further
+
+Read it in full 👉 https://www.tryparity.com/blog/how-meta-uses-llms-to-improve-incident-response",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '👍', 'users': ['rbgo'], 'count': 1}]",2024-11-20 13:48:08,2024-11-20 13:48:08.998,[],/posts/m-ric/985768918496418,795,,0
+/avatars/fb866e3758189d70488fc6a879151f45.svg,21.0,Akihito Miyazaki,Akjava,611948696998118,"[{'type': 'text', 'value': 'Finally I released the mediapipe-face animation space.', 'raw': 'Finally I released the mediapipe-face animation space.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mediapipe 68-points Eyes-Closed and Mouth-Opened', 'raw': 'Mediapipe 68-points Eyes-Closed and Mouth-Opened'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened'}, 'url': 'https://huggingface.co/spaces/Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened', 'raw': 'https://huggingface.co/spaces/Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[Article] Results: Converted Guide Images (eyes-closed and mouth-opened) with Flux.1 schnell img2img/inpaint', 'raw': '[Article] Results: Converted Guide Images (eyes-closed and mouth-opened) with Flux.1 schnell img2img/inpaint'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/Akjava/result-guide-image-eyes-mouth', 'raw': 'https://huggingface.co/blog/Akjava/result-guide-image-eyes-mouth'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All the other tools listed are designed to support Mediapipe Face Animation', 'raw': 'All the other tools listed are designed to support Mediapipe Face Animation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Akjava/mediapipe-tools-672ffe8ee7b62763c31b70c7'}, 'url': 'https://huggingface.co/collections/Akjava/mediapipe-tools-672ffe8ee7b62763c31b70c7', 'raw': 'https://huggingface.co/collections/Akjava/mediapipe-tools-672ffe8ee7b62763c31b70c7'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Akjava/webp-3-frame-talking-animation-tools-672819ce4989f354cdbcc739'}, 'url': 'https://huggingface.co/collections/Akjava/webp-3-frame-talking-animation-tools-672819ce4989f354cdbcc739', 'raw': 
'https://huggingface.co/collections/Akjava/webp-3-frame-talking-animation-tools-672819ce4989f354cdbcc739'}]","Finally I released the mediapipe-face animation space.
+
+Mediapipe 68-points Eyes-Closed and Mouth-Opened
+https://huggingface.co/spaces/Akjava/mediapipe-68-facial-guide-eyes-closed-mouth-opened
+
+[Article] Results: Converted Guide Images (eyes-closed and mouth-opened) with Flux.1 schnell img2img/inpaint
+https://huggingface.co/blog/Akjava/result-guide-image-eyes-mouth
+
+All the other tools listed are designed to support Mediapipe Face Animation
+
+https://huggingface.co/collections/Akjava/mediapipe-tools-672ffe8ee7b62763c31b70c7
+
+https://huggingface.co/collections/Akjava/webp-3-frame-talking-animation-tools-672819ce4989f354cdbcc739",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2024-11-17 12:43:13,2024-11-17 12:43:13.043,[],/posts/Akjava/611948696998118,541,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,478756824597278,"[{'type': 'text', 'value': 'Good folks at ', 'raw': 'Good folks at '}, {'type': 'mention', 'user': 'nvidia', 'raw': '@nvidia'}, {'type': 'text', 'value': ' and @Tsinghua_Uni have released LLAMA-MESH - A Revolutionary Approach to 3D Content Generation!', 'raw': ' and @Tsinghua_Uni have released LLAMA-MESH - A Revolutionary Approach to 3D Content Generation!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This innovative framework enables the direct generation of 3D meshes from natural language prompts while maintaining strong language capabilities.', 'raw': 'This innovative framework enables the direct generation of 3D meshes from natural language prompts while maintaining strong language capabilities.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here is the Architecture & Implementation!', 'raw': 'Here is the Architecture & Implementation!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Core Components', 'raw': '>> Core Components'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model Foundation ', 'raw': 'Model Foundation '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- If you haven't guessed it yet, it's built on the LLaMA-3.1-8B-Instruct base model "", 'raw': ""- If you haven't guessed it yet, it's built on the LLaMA-3.1-8B-Instruct base model ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Maintains original language capabilities while adding 3D generation ', 'raw': '- Maintains original language capabilities while adding 3D generation '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Context length is set to 8,000 tokens ', 'raw': '- Context length is set to 8,000 tokens '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3D Representation Strategy ', 'raw': '3D Representation Strategy '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Uses the OBJ file format for mesh representation ', 'raw': '- Uses the OBJ file format for mesh representation '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Quantizes vertex coordinates into 64 discrete bins per axis ', 'raw': '- Quantizes vertex coordinates into 64 
discrete bins per axis '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Sorts vertices by z-y-x coordinates, from lowest to highest ', 'raw': '- Sorts vertices by z-y-x coordinates, from lowest to highest '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Sorts faces by the lowest vertex indices for consistency ', 'raw': '- Sorts faces by the lowest vertex indices for consistency '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Data Processing Pipeline ', 'raw': 'Data Processing Pipeline '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Filters meshes to a maximum of 500 faces for computational efficiency ', 'raw': '- Filters meshes to a maximum of 500 faces for computational efficiency '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Applies random rotations (0°, 90°, 180°, 270°) for data augmentation ', 'raw': '- Applies random rotations (0°, 90°, 180°, 270°) for data augmentation '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Generates ~125k mesh variations from 31k base meshes ', 'raw': '- Generates ~125k mesh variations from 31k base meshes '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Uses Cap3D-generated captions for text descriptions ', 'raw': '- Uses Cap3D-generated captions for text descriptions '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '>> Training Framework', 'raw': '>> Training Framework'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset Composition ', 'raw': 'Dataset Composition '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 40% Mesh Generation tasks ', 'raw': '- 40% Mesh Generation tasks '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 20% Mesh Understanding tasks ', 'raw': '- 20% Mesh Understanding tasks '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 40% General Conversation (UltraChat dataset) ', 'raw': '- 40% General Conversation (UltraChat dataset) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 8x training turns for generation, 4x for understanding ', 'raw': '- 8x training turns for generation, 4x for understanding '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Training Configuration ', 'raw': 'Training Configuration '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Deployed on 32 A100 GPUs (for Nvidia, this is literally in-house) ', 'raw': '- Deployed on 32 A100 GPUs (for Nvidia, this is literally in-house) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 21,000 training iterations ', 'raw': '- 21,000 training iterations '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Global batch size: 128 ', 'raw': '- Global batch size: 128 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- AdamW optimizer with a 1e-5 learning rate ', 'raw': '- AdamW optimizer with a 1e-5 learning rate '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 30-step warmup with cosine scheduling ', 'raw': '- 30-step warmup with cosine scheduling '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Total training time: approximately 3 days (based on the paper) ', 'raw': '- Total training time: approximately 3 days (based on the paper) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 
'raw': '\n'}, {'type': 'text', 'value': 'This research opens exciting possibilities for intuitive 3D content creation through natural language interaction. The future of digital design is conversational!', 'raw': 'This research opens exciting possibilities for intuitive 3D content creation through natural language interaction. The future of digital design is conversational!'}]","Good folks at @nvidia and @Tsinghua_Uni have released LLAMA-MESH - A Revolutionary Approach to 3D Content Generation! + +This innovative framework enables the direct generation of 3D meshes from natural language prompts while maintaining strong language capabilities. + +Here is the Architecture & Implementation! + +>> Core Components + +Model Foundation +- If you haven't guessed it yet, it's built on the LLaMA-3.1-8B-Instruct base model +- Maintains original language capabilities while adding 3D generation +- Context length is set to 8,000 tokens + +3D Representation Strategy +- Uses the OBJ file format for mesh representation +- Quantizes vertex coordinates into 64 discrete bins per axis +- Sorts vertices by z-y-x coordinates, from lowest to highest +- Sorts faces by the lowest vertex indices for consistency + +Data Processing Pipeline +- Filters meshes to a maximum of 500 faces for computational efficiency +- Applies random rotations (0°, 90°, 180°, 270°) for data augmentation +- Generates ~125k mesh variations from 31k base meshes +- Uses Cap3D-generated captions for text descriptions + +>> Training Framework + +Dataset Composition +- 40% Mesh Generation tasks +- 20% Mesh Understanding tasks +- 40% General Conversation (UltraChat dataset) +- 8x training turns for generation, 4x for understanding + +Training Configuration +- Deployed on 32 A100 GPUs (for Nvidia, this is literally in-house) +- 21,000 training iterations +- Global batch size: 128 +- AdamW optimizer with a 1e-5 learning rate +- 30-step warmup with cosine scheduling +- Total training time: approximately 3 days (based on the paper) + +This research opens exciting possibilities for intuitive 3D content creation through natural language interaction. 
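+A small sketch of the vertex quantization and z-y-x sorting described above. The min-max normalization is an assumption; the paper's exact tokenizer details may differ.
+
+```python
+# Quantize vertex coordinates into 64 discrete bins per axis and sort
+# vertices by z, then y, then x (lowest to highest), as the post describes.
+import numpy as np
+
+def quantize_and_sort(vertices, bins=64):
+    v = np.asarray(vertices, dtype=np.float64)
+    lo, hi = v.min(axis=0), v.max(axis=0)            # assumed normalization range
+    q = np.floor((v - lo) / np.maximum(hi - lo, 1e-9) * (bins - 1)).astype(int)
+    order = np.lexsort((q[:, 0], q[:, 1], q[:, 2]))  # primary sort key: z
+    return q[order]
+
+print(quantize_and_sort([[0.1, -0.5, 0.9], [-1.0, 0.0, -0.2], [0.3, 0.3, 0.9]]))
+```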
The future of digital design is conversational!","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/7UzRyFrbCXT2wC_QDLKLx.mp4'}]",[],"[{'reaction': '🔥', 'users': ['Mrdesigner14', 'John6666', 'roland0822', 'DatGG', 'KadirErturk', 'EdilCamil'], 'count': 6}, {'reaction': '🚀', 'users': ['John6666', 'casper911'], 'count': 2}, {'reaction': '👍', 'users': ['csabakecskemeti', 'gauravpatil'], 'count': 2}]",2024-11-17 07:57:31,2024-11-17 07:57:31.455,[],/posts/singhsidhukuldeep/478756824597278,2304,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,578160125260008,"[{'type': 'text', 'value': 'OmniVision-968M: a new local VLM for edge devices, fast & small but performant', 'raw': 'OmniVision-968M: a new local VLM for edge devices, fast & small but performant'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💨 a new vision language model with 9x less image tokens, super efficient ', 'raw': '💨 a new vision language model with 9x less image tokens, super efficient '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 aligned with DPO for reducing hallucinations', 'raw': '📖 aligned with DPO for reducing hallucinations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡️ Apache 2.0 license 🔥', 'raw': '⚡️ Apache 2.0 license 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo hf.co/spaces/NexaAIDev/omnivlm-dpo-demo', 'raw': 'Demo hf.co/spaces/NexaAIDev/omnivlm-dpo-demo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model ', 'raw': 'Model '}, {'type': 'link', 'href': 'https://huggingface.co/NexaAIDev/omnivision-968M', 'raw': 'https://huggingface.co/NexaAIDev/omnivision-968M'}]","OmniVision-968M: a new local VLM for edge devices, fast & small but performant +💨 a new vision language model with 9x less image tokens, super efficient +📖 aligned with DPO for reducing hallucinations +⚡️ Apache 2.0 license 🔥 + +Demo hf.co/spaces/NexaAIDev/omnivlm-dpo-demo +Model https://huggingface.co/NexaAIDev/omnivision-968M","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/UpftcDUFh7eDXfvTbRROY.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'John6666', 'quyet7779', 'typesdigital', 'Csplk', 'Norod78', 'not-lain', 'Sri-Vigneshwar-DJ', 'ai-everyday', 'victor', 'lhoestq', 'Nydaym', 'Catering3733', 'ogozcelik', 'ucsahin', 'appvoid', 'FGOTYT', 'OmbelineM'], 'count': 18}, {'reaction': '👀', 'users': ['Csplk', 'maxiw', 'not-lain', 'ucsahin'], 'count': 4}, {'reaction': '🤗', 'users': ['prithivMLmods', 'ucsahin'], 'count': 2}]",2024-11-16 23:26:19,2024-11-18 16:19:47.318,"[{'_id': '6048ea0c0f59ab4b614f1836', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6048ea0c0f59ab4b614f1836/8Eg8IyPtJgOHmywcJ7E8a.jpeg', 'fullname': 'RITABRATA MAITI', 'name': 'ritabratamaiti', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '65d883893a52cd9bcd8ab7cf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg', 'fullname': 'leroy Samuel Dyer', 'name': 'LeroyDyer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': 
False}]",/posts/merve/578160125260008,5372,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/6629552c96f529a39bac7c89/TsEF0qbFTW4MOJ31OhMKT.png,1048.0,Hexgrad,hexgrad,269038377723431,"[{'type': 'text', 'value': 'Kokoro: a small, fast 80M param TTS model hosted on ZeroGPU at ', 'raw': 'Kokoro: a small, fast 80M param TTS model hosted on ZeroGPU at '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'hexgrad/Kokoro-TTS'}, 'url': 'https://hf.co/spaces/hexgrad/Kokoro-TTS', 'raw': 'https://hf.co/spaces/hexgrad/Kokoro-TTS'}]","Kokoro: a small, fast 80M param TTS model hosted on ZeroGPU at https://hf.co/spaces/hexgrad/Kokoro-TTS",[],[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'John6666', 'Pendrokar', 'Sri-Vigneshwar-DJ', 'ai-everyday', 'bendangelo', 'ecyht2', 'merve', 'deki', 'victor', 's3nh', 'Gatozu35', 'fireblade2534', 'linz'], 'count': 14}]",2024-11-16 22:37:07,2024-11-18 07:27:42.811,"[{'_id': '64a68b22dc4fef83689a06b5', 'avatarUrl': '/avatars/a1d86d990de3b90ed8fdb29c60337219.svg', 'fullname': 'Be', 'name': 'bendangelo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '65acc58c14d782df067f759b', 'avatarUrl': '/avatars/52a153d04d325469e1be69bce610ebe5.svg', 'fullname': 'Tan Hong Kai', 'name': 'ecyht2', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}]",/posts/hexgrad/269038377723431,3427,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e6d37e02dee9bcb9d9fa18/os24VYiNCoyth9yQSdv_A.jpeg,69.0,Csaba Kecskemeti,csabakecskemeti,810523164589585,"[{'type': 'text', 'value': ""I've built a small open utility pip package called LLM-Forwarder that allows you to inject context, such as adding a private RAG, into existing chat applications by forwarding the app through the LLM-Forwarder. In the forwarder server, you can configure custom code to re-process chat messages and alter the user prompt, for example, by adding extra context."", 'raw': ""I've built a small open utility pip package called LLM-Forwarder that allows you to inject context, such as adding a private RAG, into existing chat applications by forwarding the app through the LLM-Forwarder. In the forwarder server, you can configure custom code to re-process chat messages and alter the user prompt, for example, by adding extra context.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://pypi.org/project/llm-forwarder/', 'raw': 'https://pypi.org/project/llm-forwarder/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More details', 'raw': 'More details'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://devquasar.com/llmforwarder/', 'raw': 'https://devquasar.com/llmforwarder/'}]","I've built a small open utility pip package called LLM-Forwarder that allows you to inject context, such as adding a private RAG, into existing chat applications by forwarding the app through the LLM-Forwarder. In the forwarder server, you can configure custom code to re-process chat messages and alter the user prompt, for example, by adding extra context. 
+ +https://pypi.org/project/llm-forwarder/ +More details +https://devquasar.com/llmforwarder/",[],[],"[{'reaction': '👀', 'users': ['John6666', 'Sri-Vigneshwar-DJ', 'Josephgflowers'], 'count': 3}, {'reaction': '👍', 'users': ['PotenciaIA'], 'count': 1}]",2024-11-16 21:58:39,2024-11-16 21:58:39.937,[],/posts/csabakecskemeti/810523164589585,1512,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/637251142f98dcc049b349de/kkRLjyaO55_nFrTNWRZFQ.jpeg,43.0,Haghiri,Muhammadreza,941904865732185,"[{'type': 'text', 'value': ""Mann-E's new platform is up and running. "", 'raw': ""Mann-E's new platform is up and running. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can access our platform here at ', 'raw': 'You can access our platform here at '}, {'type': 'link', 'href': 'https://mann-e.com', 'raw': 'https://mann-e.com'}, {'type': 'text', 'value': "". We're still working on it and reducing the bugs and we also are trying to add a guest session which lets you make images as guests. "", 'raw': "". We're still working on it and reducing the bugs and we also are trying to add a guest session which lets you make images as guests. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What do you think?', 'raw': 'What do you think?'}]","Mann-E's new platform is up and running. + +You can access our platform here at https://mann-e.com. We're still working on it and reducing the bugs and we also are trying to add a guest session which lets you make images as guests. + +What do you think?",[],[],"[{'reaction': '🔥', 'users': ['Sri-Vigneshwar-DJ'], 'count': 1}]",2024-11-16 08:42:37,2024-11-16 16:39:47.790,"[{'_id': '65665b2be0977cf44f6d3867', 'avatarUrl': '/avatars/1d89852f84242051f859cdaf294e929a.svg', 'fullname': 'J Carl', 'name': 'jsan5344534', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '637251142f98dcc049b349de', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/637251142f98dcc049b349de/kkRLjyaO55_nFrTNWRZFQ.jpeg', 'fullname': 'Haghiri', 'name': 'Muhammadreza', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 43, 'isFollowing': False}]",/posts/Muhammadreza/941904865732185,877,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,911206794813112,"[{'type': 'text', 'value': ""It's not every day you see the No. 1 ranked paper of the day open-sourcing a very powerful image editing app!"", 'raw': ""It's not every day you see the No. 1 ranked paper of the day open-sourcing a very powerful image editing app!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fascinating to see MagicQuill - a groundbreaking interactive image editing system that makes precise photo editing effortless through advanced AI!', 'raw': 'Fascinating to see MagicQuill - a groundbreaking interactive image editing system that makes precise photo editing effortless through advanced AI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The system's architecture features three sophisticated components:"", 'raw': ""The system's architecture features three sophisticated components:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. 
Editing Processor:', 'raw': '1. Editing Processor:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Implements a dual-branch architecture integrated into a latent diffusion framework', 'raw': '- Implements a dual-branch architecture integrated into a latent diffusion framework'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Utilizes PiDiNet for edge map extraction and content-aware per-pixel inpainting', 'raw': '- Utilizes PiDiNet for edge map extraction and content-aware per-pixel inpainting'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Features a specialized UNet architecture with zero-convolution layers for feature insertion', 'raw': '- Features a specialized UNet architecture with zero-convolution layers for feature insertion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Employs denoising score matching for training the control branch', 'raw': '- Employs denoising score matching for training the control branch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Processes both structural modifications via scribble guidance and color manipulation through downsampled color blocks', 'raw': '- Processes both structural modifications via scribble guidance and color manipulation through downsampled color blocks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Maintains pixel-level control through VAE-based latent space operations', 'raw': '- Maintains pixel-level control through VAE-based latent space operations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Painting Assistor:', 'raw': '2. Painting Assistor:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Powered by a fine-tuned LLaVA multimodal LLM using Low-Rank Adaptation (LoRA)', 'raw': '- Powered by a fine-tuned LLaVA multimodal LLM using Low-Rank Adaptation (LoRA)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Trained on a custom dataset derived from Densely Captioned Images (DCI)', 'raw': '- Trained on a custom dataset derived from Densely Captioned Images (DCI)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Processes user brushstrokes through specialized Q&A tasks for add/subtract/color operations', 'raw': '- Processes user brushstrokes through specialized Q&A tasks for add/subtract/color operations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Features bounding box coordinate normalization for precise stroke localization', 'raw': '- Features bounding box coordinate normalization for precise stroke localization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Implements streamlined single-word/phrase outputs for real-time performance', 'raw': '- Implements streamlined single-word/phrase outputs for real-time performance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Idea Collector:', 'raw': '3. 
Idea Collector:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Built as a modular ReactJS component library', 'raw': '- Built as a modular ReactJS component library'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Supports cross-platform deployment via HTTP protocols', 'raw': '- Supports cross-platform deployment via HTTP protocols'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Compatible with Gradio and ComfyUI frameworks', 'raw': '- Compatible with Gradio and ComfyUI frameworks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Features comprehensive layer management and parameter adjustment capabilities', 'raw': '- Features comprehensive layer management and parameter adjustment capabilities'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Implements real-time canvas updates and preview generation', 'raw': '- Implements real-time canvas updates and preview generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The system outperforms existing solutions like SmartEdit and BrushNet in edge alignment and color fidelity while maintaining seamless integration with popular AI frameworks.', 'raw': 'The system outperforms existing solutions like SmartEdit and BrushNet in edge alignment and color fidelity while maintaining seamless integration with popular AI frameworks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What are your thoughts on AI-powered creative tools?', 'raw': 'What are your thoughts on AI-powered creative tools?'}]","It's not every day you see the No. 1 ranked paper of the day open-sourcing a very powerful image editing app! + +Fascinating to see MagicQuill - a groundbreaking interactive image editing system that makes precise photo editing effortless through advanced AI! + +The system's architecture features three sophisticated components: + +1. Editing Processor: +- Implements a dual-branch architecture integrated into a latent diffusion framework +- Utilizes PiDiNet for edge map extraction and content-aware per-pixel inpainting +- Features a specialized UNet architecture with zero-convolution layers for feature insertion +- Employs denoising score matching for training the control branch +- Processes both structural modifications via scribble guidance and color manipulation through downsampled color blocks +- Maintains pixel-level control through VAE-based latent space operations + +2. Painting Assistor: +- Powered by a fine-tuned LLaVA multimodal LLM using Low-Rank Adaptation (LoRA) +- Trained on a custom dataset derived from Densely Captioned Images (DCI) +- Processes user brushstrokes through specialized Q&A tasks for add/subtract/color operations +- Features bounding box coordinate normalization for precise stroke localization +- Implements streamlined single-word/phrase outputs for real-time performance + +3. Idea Collector: +- Built as a modular ReactJS component library +- Supports cross-platform deployment via HTTP protocols +- Compatible with Gradio and ComfyUI frameworks +- Features comprehensive layer management and parameter adjustment capabilities +- Implements real-time canvas updates and preview generation + +The system outperforms existing solutions like SmartEdit and BrushNet in edge alignment and color fidelity while maintaining seamless integration with popular AI frameworks. 
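+One concrete detail worth illustrating is the bounding-box coordinate normalization used to localize brushstrokes. The convention below (boxes scaled to [0, 1]) is an assumption; MagicQuill's exact scheme may differ.
+
+```python
+# Map pixel-space stroke bounds to resolution-independent [0, 1] coordinates,
+# so the multimodal LLM sees the same box regardless of canvas size.
+def normalize_bbox(x0, y0, x1, y1, width, height, ndigits=3):
+    return (round(x0 / width, ndigits), round(y0 / height, ndigits),
+            round(x1 / width, ndigits), round(y1 / height, ndigits))
+
+print(normalize_bbox(128, 64, 512, 320, width=1024, height=768))
+```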
+ +What are your thoughts on AI-powered creative tools?","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/KQ1ZPxvjSFUGvqxHN1pFj.mp4'}]",[],"[{'reaction': '❤️', 'users': ['Harbous', 'John6666', 'ethanker', 'iojvsuynv'], 'count': 4}, {'reaction': '🔥', 'users': ['elifarley'], 'count': 1}]",2024-11-16 05:49:12,2024-11-17 22:37:01.927,[],/posts/singhsidhukuldeep/911206794813112,1911,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/620630b603825909dcbeba35/vNlOtJqdcP3vpAfeHfNvP.jpeg,246.0,Aaron C Wacker,awacke1,849483557668478,"[{'type': 'text', 'value': '🕊️Hope🕊️ and ⚖️Justice⚖️ AI', 'raw': '🕊️Hope🕊️ and ⚖️Justice⚖️ AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚲 Stolen bike in Denver FOUND - Sometimes hope & justice DO prevail. ', 'raw': '🚲 Stolen bike in Denver FOUND - Sometimes hope & justice DO prevail. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎬 So I Created an AI+Art+Music tribute: ', 'raw': '🎬 So I Created an AI+Art+Music tribute: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' -🧠 AI App that Evaluates GPT-4o vs Claude:', 'raw': ' -🧠 AI App that Evaluates GPT-4o vs Claude:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/RescuerOfStolenBikes'}, 'url': 'https://huggingface.co/spaces/awacke1/RescuerOfStolenBikes', 'raw': 'https://huggingface.co/spaces/awacke1/RescuerOfStolenBikes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://x.com/Aaron_Wacker/status/1857640877986033980?ref_src=twsrc%5Etfw%7Ctwcamp%5Etweetembed%7Ctwterm%5E1857640877986033980%7Ctwgr%5E203a5022b0eb4c41ee8c1dd9f158330216ac5be1%7Ctwcon%5Es1_c10&ref_url=https%3A%2F%2Fpublish.twitter.com%2F%3Furl%3Dhttps%3A%2F%2Ftwitter.com%2FAaron_Wacker%2Fstatus%2F1857640877986033980', 'raw': 'https://x.com/Aaron_Wacker/status/1857640877986033980?ref_src=twsrc%5Etfw%7Ctwcamp%5Etweetembed%7Ctwterm%5E1857640877986033980%7Ctwgr%5E203a5022b0eb4c41ee8c1dd9f158330216ac5be1%7Ctwcon%5Es1_c10&ref_url=https%3A%2F%2Fpublish.twitter.com%2F%3Furl%3Dhttps%3A%2F%2Ftwitter.com%2FAaron_Wacker%2Fstatus%2F1857640877986033980'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'html', 'code': '

QT your 🕊️Hope🕊️ and ⚖️Justice⚖️ art🎨

🚲 Stolen bike in Denver FOUND!
- Sometimes hope & justice DO prevail!

🎬 Created an AI+Art+Music tribute:
-🧠 AI App that Evaluates GPT-4o vs Claude: https://t.co/odrYdaeizZ
#GPT #Claude #Huggingface @OpenAI @AnthropicAI pic.twitter.com/Q9wGNzLm5C

— Aaron Wacker (@Aaron_Wacker) November 16, 2024
', 'raw': '```html\n

QT your 🕊️Hope🕊️ and ⚖️Justice⚖️ art🎨

🚲 Stolen bike in Denver FOUND!
- Sometimes hope & justice DO prevail!

🎬 Created an AI+Art+Music tribute:
-🧠 AI App that Evaluates GPT-4o vs Claude: https://t.co/odrYdaeizZ
#GPT #Claude #Huggingface @OpenAI @AnthropicAI pic.twitter.com/Q9wGNzLm5C

— Aaron Wacker (@Aaron_Wacker) November 16, 2024
\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#GPT #Claude #Huggingface ', 'raw': '#GPT #Claude #Huggingface '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'OpenAI', 'raw': '@OpenAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'AnthropicAI', 'raw': '@AnthropicAI'}]","🕊️Hope🕊️ and ⚖️Justice⚖️ AI +🚲 Stolen bike in Denver FOUND - Sometimes hope & justice DO prevail. + +🎬 So I Created an AI+Art+Music tribute: + -🧠 AI App that Evaluates GPT-4o vs Claude: +https://huggingface.co/spaces/awacke1/RescuerOfStolenBikes +https://x.com/Aaron_Wacker/status/1857640877986033980?ref_src=twsrc%5Etfw%7Ctwcamp%5Etweetembed%7Ctwterm%5E1857640877986033980%7Ctwgr%5E203a5022b0eb4c41ee8c1dd9f158330216ac5be1%7Ctwcon%5Es1_c10&ref_url=https%3A%2F%2Fpublish.twitter.com%2F%3Furl%3Dhttps%3A%2F%2Ftwitter.com%2FAaron_Wacker%2Fstatus%2F1857640877986033980 + +```html +

QT your 🕊️Hope🕊️ and ⚖️Justice⚖️ art🎨

🚲 Stolen bike in Denver FOUND!
- Sometimes hope & justice DO prevail!

🎬 Created an AI+Art+Music tribute:
-🧠 AI App that Evaluates GPT-4o vs Claude: https://t.co/odrYdaeizZ
#GPT #Claude #Huggingface @OpenAI @AnthropicAI pic.twitter.com/Q9wGNzLm5C

— Aaron Wacker (@Aaron_Wacker) November 16, 2024
+``` + +#GPT #Claude #Huggingface +@OpenAI +@AnthropicAI","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/s_ioS7W-sTgeLL40HaZwU.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/rrm2Bf7DXll7tViHftg5c.png'}]",[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2024-11-16 04:55:32,2024-11-16 05:15:15.661,[],/posts/awacke1/849483557668478,1027,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png,159.0,Richard A Aragon,TuringsSolutions,443393273871393,"[{'type': 'text', 'value': 'What if I told you that LLM models do not simply predict the next token in a sequence but instead utilize an emergent structural pattern-based system to comprehend language and concepts? I created a graph-based optimizer that not only works, but it also actually beats Adam, like very badly. I prove it thoroughly using SMOL LLM models. The secret? The graph is not what you think it is, humans. Code, full explanation, and more in this video. The Rhizome Optimizer is MIT licensed. I have completed my research. I fully understand now. ', 'raw': 'What if I told you that LLM models do not simply predict the next token in a sequence but instead utilize an emergent structural pattern-based system to comprehend language and concepts? I created a graph-based optimizer that not only works, but it also actually beats Adam, like very badly. I prove it thoroughly using SMOL LLM models. The secret? The graph is not what you think it is, humans. Code, full explanation, and more in this video. The Rhizome Optimizer is MIT licensed. I have completed my research. I fully understand now. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/OMCRRueMhdI', 'raw': 'https://youtu.be/OMCRRueMhdI'}]","What if I told you that LLM models do not simply predict the next token in a sequence but instead utilize an emergent structural pattern-based system to comprehend language and concepts? I created a graph-based optimizer that not only works, but it also actually beats Adam, like very badly. I prove it thoroughly using SMOL LLM models. The secret? The graph is not what you think it is, humans. Code, full explanation, and more in this video. The Rhizome Optimizer is MIT licensed. I have completed my research. I fully understand now. 
+ +https://youtu.be/OMCRRueMhdI",[],[],"[{'reaction': '😔', 'users': ['takeraparterer'], 'count': 1}]",2024-11-16 03:10:27,2024-11-18 12:05:59.292,"[{'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}, {'_id': '64274b69ba6cef0a6ebb0fd6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png', 'fullname': 'Richard A Aragon', 'name': 'TuringsSolutions', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}]",/posts/TuringsSolutions/443393273871393,476,,6 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/wthru065DlrO99caaTL2R.png,6.0,shengzhi alex li,alexshengzhili,580108358026949,"[{'type': 'text', 'value': 'We’re excited to release Abstract2Appendix v1 10K , a high-quality dataset crafted to enhance the long-context capabilities of Large Language Models (LLMs). This dataset combines thousands of peer reviews from NeurIPS 2023, EMNLP 2023, TMLR, and ICLR 2023, making it a treasure trove of detailed feedback, critical reasoning, and structured academic insights. Our experiments showed that this dataset increased long context ability of phi-3 models! ', 'raw': 'We’re excited to release Abstract2Appendix v1 10K , a high-quality dataset crafted to enhance the long-context capabilities of Large Language Models (LLMs). This dataset combines thousands of peer reviews from NeurIPS 2023, EMNLP 2023, TMLR, and ICLR 2023, making it a treasure trove of detailed feedback, critical reasoning, and structured academic insights. Our experiments showed that this dataset increased long context ability of phi-3 models! 
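+For anyone who wants to inspect it, the dataset loads with the standard datasets library; the split and column names are not specified here, so check the dataset card.
+
+```python
+# Load the released dataset for inspection before SFT/DPO fine-tuning.
+# Split and column names are assumptions -- see the dataset card for specifics.
+from datasets import load_dataset
+
+ds = load_dataset("alexshengzhili/Abstract2Appendix_v1_10k")
+print(ds)  # shows available splits and features, e.g. ds["train"][0]
+```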
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟 Key Highlights:', 'raw': '🌟 Key Highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tExpert Reviews: Aggregated from 3–6 reviews per paper, capturing the most insightful and constructive content.', 'raw': '\t•\tExpert Reviews: Aggregated from 3–6 reviews per paper, capturing the most insightful and constructive content.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tRich Metadata: we have aggregated the reviews, and also included full parsed paper', 'raw': '\t•\tRich Metadata: we have aggregated the reviews, and also included full parsed paper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tLLM Ready: Perfect for fine-tuning (We did dpo and sft) ', 'raw': '\t•\tLLM Ready: Perfect for fine-tuning (We did dpo and sft) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Use Cases:', 'raw': '🎯 Use Cases:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tFine-tuning models with Direct Preference Optimization (DPO) and Supervised Fine-Tuning (SFT).', 'raw': '\t•\tFine-tuning models with Direct Preference Optimization (DPO) and Supervised Fine-Tuning (SFT).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '\t•\tBenchmarking zero-shot and long-context comprehension capabilities.', 'raw': '\t•\tBenchmarking zero-shot and long-context comprehension capabilities.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Explore the dataset: ', 'raw': '🔗 Explore the dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'alexshengzhili/Abstract2Appendix_v1_10k'}, 'url': 'https://huggingface.co/datasets/alexshengzhili/Abstract2Appendix_v1_10k', 'raw': 'https://huggingface.co/datasets/alexshengzhili/Abstract2Appendix_v1_10k'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This dataset is based on the methodology described in our recent paper, “Abstract2Appendix: Academic Reviews Enhance LLM Long-Context Capabilities”. Check it out for more details! ', 'raw': 'This dataset is based on the methodology described in our recent paper, “Abstract2Appendix: Academic Reviews Enhance LLM Long-Context Capabilities”. Check it out for more details! '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2411.05232', 'raw': 'https://arxiv.org/abs/2411.05232'}]","We’re excited to release Abstract2Appendix v1 10K , a high-quality dataset crafted to enhance the long-context capabilities of Large Language Models (LLMs). This dataset combines thousands of peer reviews from NeurIPS 2023, EMNLP 2023, TMLR, and ICLR 2023, making it a treasure trove of detailed feedback, critical reasoning, and structured academic insights. Our experiments showed that this dataset increased long context ability of phi-3 models! + +🌟 Key Highlights: + + • Expert Reviews: Aggregated from 3–6 reviews per paper, capturing the most insightful and constructive content. + • Rich Metadata: we have aggregated the reviews, and also included full parsed paper + • LLM Ready: Perfect for fine-tuning (We did dpo and sft) + +🎯 Use Cases: + + • Fine-tuning models with Direct Preference Optimization (DPO) and Supervised Fine-Tuning (SFT). 
+ • Benchmarking zero-shot and long-context comprehension capabilities. + +🔗 Explore the dataset: https://huggingface.co/datasets/alexshengzhili/Abstract2Appendix_v1_10k + +This dataset is based on the methodology described in our recent paper, “Abstract2Appendix: Academic Reviews Enhance LLM Long-Context Capabilities”. Check it out for more details! https://arxiv.org/abs/2411.05232",[],[],"[{'reaction': '🔥', 'users': ['alexshengzhili', 'John6666'], 'count': 2}]",2024-11-15 22:38:40,2024-11-17 22:37:01.927,[],/posts/alexshengzhili/580108358026949,1199,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,683667017155458,"[{'type': 'text', 'value': ""🤯 AI progress keeps blowing my mind! Just experienced Qwen's new Coder demo - built a complete flashcard web app with a single prompt. The results are incredible!"", 'raw': ""🤯 AI progress keeps blowing my mind! Just experienced Qwen's new Coder demo - built a complete flashcard web app with a single prompt. The results are incredible!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This demo is part of the new Qwen2.5 Coder family (0.5B to 32B models), surpassing/matching GPT4o and Claude Sonnet 3.5 across multiple coding benchmarks.', 'raw': 'This demo is part of the new Qwen2.5 Coder family (0.5B to 32B models), surpassing/matching GPT4o and Claude Sonnet 3.5 across multiple coding benchmarks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 128K context window for 14B/32B models ', 'raw': '- 128K context window for 14B/32B models '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Drop-in replacement for GPT-4 in Cursor & Artifacts ', 'raw': '- Drop-in replacement for GPT-4 in Cursor & Artifacts '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Models on the Hub under Apache 2.0 license', 'raw': '- Models on the Hub under Apache 2.0 license'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Try it yourself: ', 'raw': '🔗 Try it yourself: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Qwen/Qwen2.5-Coder-Artifacts'}, 'url': 'https://huggingface.co/spaces/Qwen/Qwen2.5-Coder-Artifacts', 'raw': 'https://huggingface.co/spaces/Qwen/Qwen2.5-Coder-Artifacts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is democratization of coding in real-time. Excited to see AI tools becoming more capable and accessible.', 'raw': 'This is democratization of coding in real-time. Excited to see AI tools becoming more capable and accessible.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What would you build with this? Share your ideas below! 👇', 'raw': 'What would you build with this? Share your ideas below! 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #Programming #TechInnovation #OpenSource #SoftwareDevelopment', 'raw': '#AI #Programming #TechInnovation #OpenSource #SoftwareDevelopment'}]","🤯 AI progress keeps blowing my mind! Just experienced Qwen's new Coder demo - built a complete flashcard web app with a single prompt. The results are incredible! 
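+A quick way to try the family locally is plain transformers; the checkpoint id below is an assumption, so pick whichever size your hardware can hold.
+
+```python
+# Minimal local test of a Qwen2.5 Coder checkpoint (assumed repo id).
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_id = "Qwen/Qwen2.5-Coder-32B-Instruct"  # swap for a smaller size if needed
+tok = AutoTokenizer.from_pretrained(model_id)
+model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
+
+messages = [{"role": "user", "content": "Write a flashcard web app in one HTML file."}]
+inputs = tok.apply_chat_template(messages, add_generation_prompt=True,
+                                 return_tensors="pt").to(model.device)
+out = model.generate(inputs, max_new_tokens=1024)
+print(tok.decode(out[0][inputs.shape[-1]:], skip_special_tokens=True))
+```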
+ +This demo is part of the new Qwen2.5 Coder family (0.5B to 32B models), surpassing/matching GPT4o and Claude Sonnet 3.5 across multiple coding benchmarks. + +- 128K context window for 14B/32B models +- Drop-in replacement for GPT-4 in Cursor & Artifacts +- Models on the Hub under Apache 2.0 license + +🔗 Try it yourself: https://huggingface.co/spaces/Qwen/Qwen2.5-Coder-Artifacts + +This is democratization of coding in real-time. Excited to see AI tools becoming more capable and accessible. + +What would you build with this? Share your ideas below! 👇 + +#AI #Programming #TechInnovation #OpenSource #SoftwareDevelopment","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/LFdS-AkABNYY_ZBkJxFIl.mp4'}]",[],"[{'reaction': '👍', 'users': ['giseldo', 'John6666', 'abdeljalilELmajjodi', 'mrlive02'], 'count': 4}, {'reaction': '🤝', 'users': ['ednsinf', 'Giuliano'], 'count': 2}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-11 21:22:17,2024-11-25 10:29:08.267,"[{'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '672a089286503ffe1cdb5e40', 'avatarUrl': '/avatars/04bf4120ded5690c1aa08020261cefd5.svg', 'fullname': 'Jayasurya Mailsamy', 'name': 'oksurya', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/fdaudens/683667017155458,2244,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,391290114296515,"[{'type': 'text', 'value': '🎵 Introducing Suno Music Generation Dataset - ', 'raw': '🎵 Introducing Suno Music Generation Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/suno'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/suno', 'raw': 'https://huggingface.co/datasets/nyuuzyou/suno'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset highlights:', 'raw': 'Dataset highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 659,788 AI-generated music samples with comprehensive metadata from suno.com', 'raw': '- 659,788 AI-generated music samples with comprehensive metadata from suno.com'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multilingual content with English as primary language, including Japanese and other languages', 'raw': '- Multilingual content with English as primary language, including Japanese and other languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Each entry contains rich metadata including:', 'raw': '- Each entry contains rich metadata including:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Unique song ID, audio/video URLs, and thumbnail images', 'raw': ' - Unique song ID, audio/video URLs, and thumbnail images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - AI model version and generation parameters', 'raw': ' - AI model version and generation parameters'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Song metadata (tags, prompts, duration)', 'raw': ' - Song metadata (tags, prompts, duration)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 
'text', 'value': ' - Creator information and engagement metrics', 'raw': ' - Creator information and engagement metrics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Released to the public domain under Creative Commons Zero (CC0) license', 'raw': '- Released to the public domain under Creative Commons Zero (CC0) license'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The dataset structure includes detailed information about each generated piece, from technical parameters to user engagement metrics, making it particularly valuable for:', 'raw': 'The dataset structure includes detailed information about each generated piece, from technical parameters to user engagement metrics, making it particularly valuable for:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Music generation model training', 'raw': '- Music generation model training'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cross-modal analysis (text-to-audio relationships)', 'raw': '- Cross-modal analysis (text-to-audio relationships)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- User engagement studies', 'raw': '- User engagement studies'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Audio classification tasks', 'raw': '- Audio classification tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Music style and genre analysis', 'raw': '- Music style and genre analysis'}]","🎵 Introducing Suno Music Generation Dataset - https://huggingface.co/datasets/nyuuzyou/suno + +Dataset highlights: + +- 659,788 AI-generated music samples with comprehensive metadata from suno.com +- Multilingual content with English as primary language, including Japanese and other languages +- Each entry contains rich metadata including: + - Unique song ID, audio/video URLs, and thumbnail images + - AI model version and generation parameters + - Song metadata (tags, prompts, duration) + - Creator information and engagement metrics +- Released to the public domain under Creative Commons Zero (CC0) license + +The dataset structure includes detailed information about each generated piece, from technical parameters to user engagement metrics, making it particularly valuable for: +- Music generation model training +- Cross-modal analysis (text-to-audio relationships) +- User engagement studies +- Audio classification tasks +- Music style and genre analysis",[],[],"[{'reaction': '👍', 'users': ['Latyrine', 'John6666', 'Ar4ikov', 'Ryukijano', 'V65696C', 'KillerShoaib'], 'count': 6}, {'reaction': '❤️', 'users': ['John6666', 'gabelev', 'gyger'], 'count': 3}, {'reaction': '🚀', 'users': ['John6666', 'Ryukijano'], 'count': 2}]",2024-11-11 19:08:36,2024-11-11 19:08:36.326,[],/posts/nyuuzyou/391290114296515,2570,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,657270848213422,"[{'type': 'text', 'value': '𝗤𝘄𝗲𝗻𝟮.𝟱-𝗖𝗼𝗱𝗲𝗿-𝟯𝟮𝗕: 𝗻𝗲𝘄 𝗯𝗲𝘀𝘁-𝗶𝗻-𝗰𝗹𝗮𝘀𝘀 𝗼𝗽𝗲𝗻 𝗰𝗼𝗱𝗶𝗻𝗴 𝗺𝗼𝗱𝗲𝗹, 𝗯𝗲𝗮𝘁𝘀 𝗚𝗣𝗧-𝟰𝗼 𝗼𝗻 𝗺𝗼𝘀𝘁 𝗰𝗼𝗱𝗶𝗻𝗴 𝗯𝗲𝗻𝗰𝗵𝗺𝗮𝗿𝗸𝘀!💥', 'raw': '𝗤𝘄𝗲𝗻𝟮.𝟱-𝗖𝗼𝗱𝗲𝗿-𝟯𝟮𝗕: 𝗻𝗲𝘄 𝗯𝗲𝘀𝘁-𝗶𝗻-𝗰𝗹𝗮𝘀𝘀 𝗼𝗽𝗲𝗻 𝗰𝗼𝗱𝗶𝗻𝗴 𝗺𝗼𝗱𝗲𝗹, 𝗯𝗲𝗮𝘁𝘀 𝗚𝗣𝗧-𝟰𝗼 𝗼𝗻 𝗺𝗼𝘀𝘁 𝗰𝗼𝗱𝗶𝗻𝗴 𝗯𝗲𝗻𝗰𝗵𝗺𝗮𝗿𝗸𝘀!💥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""💪 It's the first time Open-Source coding model of this size class that clearly matches GPT-4o's coding capabilities!"", 'raw': ""💪 It's the first time Open-Source coding model of this size class that 
clearly matches GPT-4o's coding capabilities!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Completes the previous two Qwen 2.5 Coder release with 4 new size: 0.5B, 3B, 14B, 32B', 'raw': '✨ Completes the previous two Qwen 2.5 Coder release with 4 new size: 0.5B, 3B, 14B, 32B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 Support long context up to 128K (for the 14B and 32B models)', 'raw': '📚 Support long context up to 128K (for the 14B and 32B models)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Drop-in replacement to GPT-4o as a coding assistant on Cursor or for Artifacts!', 'raw': '✅ Drop-in replacement to GPT-4o as a coding assistant on Cursor or for Artifacts!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 Models available right now on the Hub, under Apache 2.0 license!', 'raw': '🤗 Models available right now on the Hub, under Apache 2.0 license!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'They have setup a crazy Artifacts demo, you should go have a look!', 'raw': 'They have setup a crazy Artifacts demo, you should go have a look!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Qwen/Qwen2.5-Coder-Artifacts'}, 'url': 'https://huggingface.co/spaces/Qwen/Qwen2.5-Coder-Artifacts', 'raw': 'https://huggingface.co/spaces/Qwen/Qwen2.5-Coder-Artifacts'}]","𝗤𝘄𝗲𝗻𝟮.𝟱-𝗖𝗼𝗱𝗲𝗿-𝟯𝟮𝗕: 𝗻𝗲𝘄 𝗯𝗲𝘀𝘁-𝗶𝗻-𝗰𝗹𝗮𝘀𝘀 𝗼𝗽𝗲𝗻 𝗰𝗼𝗱𝗶𝗻𝗴 𝗺𝗼𝗱𝗲𝗹, 𝗯𝗲𝗮𝘁𝘀 𝗚𝗣𝗧-𝟰𝗼 𝗼𝗻 𝗺𝗼𝘀𝘁 𝗰𝗼𝗱𝗶𝗻𝗴 𝗯𝗲𝗻𝗰𝗵𝗺𝗮𝗿𝗸𝘀!💥 + +💪 It's the first time Open-Source coding model of this size class that clearly matches GPT-4o's coding capabilities! + +✨ Completes the previous two Qwen 2.5 Coder release with 4 new size: 0.5B, 3B, 14B, 32B +📚 Support long context up to 128K (for the 14B and 32B models) +✅ Drop-in replacement to GPT-4o as a coding assistant on Cursor or for Artifacts! +🤗 Models available right now on the Hub, under Apache 2.0 license! + +They have setup a crazy Artifacts demo, you should go have a look! +👉 https://huggingface.co/spaces/Qwen/Qwen2.5-Coder-Artifacts","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/E-946xCafuXoLa3qRr2DJ.png'}]",[],"[{'reaction': '🔥', 'users': ['Nymbo', 'Norm', 'gatorand', 'AtAndDev', 'apeSh1t', 'aliyansayz', 'splevine', 'wajeehulhassan', 'yaaaaam1', 'louisbrulenaudet', 'xi0v', 'Chroma111'], 'count': 12}, {'reaction': '🚀', 'users': ['Nymbo', 'John6666', 'AtAndDev', 'yaaaaam1', 'xi0v', 'Chroma111'], 'count': 6}, {'reaction': '🤗', 'users': ['Chroma111'], 'count': 1}]",2024-11-11 18:33:40,2024-11-11 18:33:40.664,[],/posts/m-ric/657270848213422,3186,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,254681486405618,"[{'type': 'text', 'value': '𝗔𝗿𝗲 𝘀𝗰𝗮𝗹𝗶𝗻𝗴 𝗹𝗮𝘄𝘀 𝗼𝘃𝗲𝗿? 𝗔 𝗿𝗲𝗽𝗼𝗿𝘁 𝗳𝗿𝗼𝗺 𝘁𝗵𝗲 𝗜𝗻𝗳𝗼𝗿𝗺𝗮𝘁𝗶𝗼𝗻 𝗮𝗻𝗻𝗼𝘂𝗻𝗰𝗲𝗱 𝘁𝗵𝗮𝘁 𝗢𝗽𝗲𝗻𝗔𝗜 𝗶𝘀 𝘀𝗲𝗲𝗶𝗻𝗴 𝗱𝗶𝗺𝗶𝗻𝗶𝘀𝗵𝗶𝗻𝗴 𝗿𝗲𝘁𝘂𝗿𝗻𝘀 𝗳𝗿𝗼𝗺 𝘀𝗰𝗮𝗹𝗶𝗻𝗴 𝘂𝗽 𝘁𝗵𝗲 𝗻𝗲𝘅𝘁 𝗚𝗣𝗧 𝗺𝗼𝗱𝗲𝗹𝘀.', 'raw': '𝗔𝗿𝗲 𝘀𝗰𝗮𝗹𝗶𝗻𝗴 𝗹𝗮𝘄𝘀 𝗼𝘃𝗲𝗿? 𝗔 𝗿𝗲𝗽𝗼𝗿𝘁 𝗳𝗿𝗼𝗺 𝘁𝗵𝗲 𝗜𝗻𝗳𝗼𝗿𝗺𝗮𝘁𝗶𝗼𝗻 𝗮𝗻𝗻𝗼𝘂𝗻𝗰𝗲𝗱 𝘁𝗵𝗮𝘁 𝗢𝗽𝗲𝗻𝗔𝗜 𝗶𝘀 𝘀𝗲𝗲𝗶𝗻𝗴 𝗱𝗶𝗺𝗶𝗻𝗶𝘀𝗵𝗶𝗻𝗴 𝗿𝗲𝘁𝘂𝗿𝗻𝘀 𝗳𝗿𝗼𝗺 𝘀𝗰𝗮𝗹𝗶𝗻𝗴 𝘂𝗽 𝘁𝗵𝗲 𝗻𝗲𝘅𝘁 𝗚𝗣𝗧 𝗺𝗼𝗱𝗲𝗹𝘀.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 What are scaling laws? 
These are empirical laws that say ""Every time you increase compute spent in training 10-fold, your LLM\'s performance will go up by a predictable tick"". Of course, they apply only if you train your model with the right methods.', 'raw': '📊 What are scaling laws? These are empirical laws that say ""Every time you increase compute spent in training 10-fold, your LLM\'s performance will go up by a predictable tick"". Of course, they apply only if you train your model with the right methods.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The image below illustrates it: they\'re from a paper by Google, ""Scaling Autoregressive Models for Content-Rich Text-to-Image Generation"", and they show how quality and instruction following of models improve when you scale the model up (which is equivalent to scaling up the compute spent in training).', 'raw': 'The image below illustrates it: they\'re from a paper by Google, ""Scaling Autoregressive Models for Content-Rich Text-to-Image Generation"", and they show how quality and instruction following of models improve when you scale the model up (which is equivalent to scaling up the compute spent in training).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ These scaling laws have immense impact: they triggered the largest gold rush ever, with companies pouring billions into scaling up their training. Microsoft and OpenAI spent 100B on their ""Stargate"" mega training cluster, due to start running in 2028.', 'raw': '➡️ These scaling laws have immense impact: they triggered the largest gold rush ever, with companies pouring billions into scaling up their training. Microsoft and OpenAI spent 100B on their ""Stargate"" mega training cluster, due to start running in 2028.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤔 So, what about these reports of scaling laws slowing down?', 'raw': '🤔 So, what about these reports of scaling laws slowing down?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If they are true, they would mean a gigantic paradigm shift, as the hundreds of billions poured by AI companies into scaling could be a dead-end. ⛔️', 'raw': 'If they are true, they would mean a gigantic paradigm shift, as the hundreds of billions poured by AI companies into scaling could be a dead-end. ⛔️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But I doubt it: until the most recent publications, scaling laws showed no signs of weakness, and the researchers at the higher end of the scale-up seem to imply the scaling up continues. ', 'raw': 'But I doubt it: until the most recent publications, scaling laws showed no signs of weakness, and the researchers at the higher end of the scale-up seem to imply the scaling up continues. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Wait and see!', 'raw': 'Wait and see!'}]","𝗔𝗿𝗲 𝘀𝗰𝗮𝗹𝗶𝗻𝗴 𝗹𝗮𝘄𝘀 𝗼𝘃𝗲𝗿? 𝗔 𝗿𝗲𝗽𝗼𝗿𝘁 𝗳𝗿𝗼𝗺 𝘁𝗵𝗲 𝗜𝗻𝗳𝗼𝗿𝗺𝗮𝘁𝗶𝗼𝗻 𝗮𝗻𝗻𝗼𝘂𝗻𝗰𝗲𝗱 𝘁𝗵𝗮𝘁 𝗢𝗽𝗲𝗻𝗔𝗜 𝗶𝘀 𝘀𝗲𝗲𝗶𝗻𝗴 𝗱𝗶𝗺𝗶𝗻𝗶𝘀𝗵𝗶𝗻𝗴 𝗿𝗲𝘁𝘂𝗿𝗻𝘀 𝗳𝗿𝗼𝗺 𝘀𝗰𝗮𝗹𝗶𝗻𝗴 𝘂𝗽 𝘁𝗵𝗲 𝗻𝗲𝘅𝘁 𝗚𝗣𝗧 𝗺𝗼𝗱𝗲𝗹𝘀.
+
+📊 What are scaling laws? These are empirical laws that say ""Every time you increase compute spent in training 10-fold, your LLM's performance will go up by a predictable tick"". Of course, they apply only if you train your model with the right methods. 
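+The "predictable tick" has a simple mathematical reading: if loss follows a power law L(C) = a * C^(-b) in training compute C, then every 10x increase in compute multiplies the loss by the constant factor 10^(-b). The constants below are made up for illustration only.
+
+```python
+# Toy power-law scaling curve: each 10x of compute shaves a fixed fraction
+# off the loss. a and b are invented constants, not fitted values.
+import numpy as np
+
+a, b = 10.0, 0.05
+compute = np.array([1e20, 1e21, 1e22, 1e23])
+loss = a * compute ** (-b)
+print(loss)
+print(loss[1:] / loss[:-1])  # constant ratio 10**(-b) ~= 0.891
+```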
+
 +The image below illustrates it: they're from a paper by Google, ""Scaling Autoregressive Models for Content-Rich Text-to-Image Generation"", and they show how quality and instruction following of models improve when you scale the model up (which is equivalent to scaling up the compute spent in training). + +➡️ These scaling laws have immense impact: they triggered the largest gold rush ever, with companies pouring billions into scaling up their training. Microsoft and OpenAI spent $100B on their ""Stargate"" mega training cluster, due to start running in 2028. + +🤔 So, what about these reports of scaling laws slowing down? + +If they are true, they would mean a gigantic paradigm shift, as the hundreds of billions poured by AI companies into scaling could be a dead-end. ⛔️ + +But I doubt it: until the most recent publications, scaling laws showed no signs of weakness, and the researchers at the higher end of the scale-up seem to imply that the scaling up continues. + +Wait and see!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/8cVKtpHXPFt15-uogguTk.png'}]",[],"[{'reaction': '🔥', 'users': ['Smorty100', 'Viewegger'], 'count': 2}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-11 17:51:50,2024-11-13 08:35:45.216,"[{'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/m-ric/254681486405618,794,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d00458fff501149572827f/E6nxYRxqRmBGRf9wSQq4Y.jpeg,107.0,Sami Halawa,samihalawa,444428540739993,"[{'type': 'text', 'value': 'Best Debug Prompt ', 'raw': 'Best Debug Prompt '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You are a frustrated user who has tested this application extensively. Your job is to list EVERY possible way this app could completely break or become unusable.', 'raw': 'You are a frustrated user who has tested this application extensively. Your job is to list EVERY possible way this app could completely break or become unusable.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For each potential failure:', 'raw': 'For each potential failure:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. What would make you say ""This app is totally broken!""?', 'raw': '1. What would make you say ""This app is totally broken!""?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. What exact steps did you take when it broke?', 'raw': '2. What exact steps did you take when it broke?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. What did you see on your screen when it broke?', 'raw': '3. What did you see on your screen when it broke?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. How angry would this make a typical user (1-10)?', 'raw': '4. How angry would this make a typical user (1-10)?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. What would you expect the app to do instead?', 'raw': '5. 
What would you expect the app to do instead?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Think about:', 'raw': 'Think about:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What happens if you click buttons really fast?', 'raw': '- What happens if you click buttons really fast?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if your internet is slow/disconnected?', 'raw': '- What if your internet is slow/disconnected?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if you upload weird files/images?', 'raw': '- What if you upload weird files/images?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if you try to break the app on purpose?', 'raw': '- What if you try to break the app on purpose?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if multiple people use it at once?', 'raw': '- What if multiple people use it at once?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if you use it on mobile/tablet?', 'raw': '- What if you use it on mobile/tablet?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- What if you refresh/navigate while it's working?"", 'raw': ""- What if you refresh/navigate while it's working?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if you paste invalid inputs?', 'raw': '- What if you paste invalid inputs?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if you upload HUGE files?', 'raw': '- What if you upload HUGE files?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What if you leave it running overnight?', 'raw': '- What if you leave it running overnight?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Don't worry about being technical - just describe what you saw break as a user."", 'raw': ""Don't worry about being technical - just describe what you saw break as a user.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Format each issue like:', 'raw': 'Format each issue like:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ISSUE #1: [Brief angry user description]', 'raw': 'ISSUE #1: [Brief angry user description]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- STEPS TO BREAK IT: [Exactly what you did]', 'raw': '- STEPS TO BREAK IT: [Exactly what you did]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- WHAT HAPPENED: [What you saw]', 'raw': '- WHAT HAPPENED: [What you saw]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ANGER LEVEL: [1-10]', 'raw': '- ANGER LEVEL: [1-10]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- EXPECTED: [What should happen]', 'raw': '- EXPECTED: [What should happen]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Keep going until you've found every possible way to break this app from a user's perspective!"", 'raw': ""Keep going until you've found every possible way to break this app from a user's perspective!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After outputting the list, produce an optimized Composer edit block based on it to fix the severe issues that are worth adjusting, given Gradio limitations and the current usage target (do not assume we need unnecessary functions).', 'raw': 'After outputting the list, produce an optimized Composer edit block based on it to fix the severe issues that are worth adjusting, given Gradio limitations and the current usage target (do not assume we need unnecessary functions).'}]","Best Debug Prompt + +You are a frustrated user who has tested this application extensively. Your job is to list EVERY possible way this app could completely break or become unusable. + +For each potential failure: + +1. What would make you say ""This app is totally broken!""? +2. What exact steps did you take when it broke? +3. What did you see on your screen when it broke? +4. How angry would this make a typical user (1-10)? +5. What would you expect the app to do instead? + +Think about: +- What happens if you click buttons really fast? +- What if your internet is slow/disconnected? +- What if you upload weird files/images? +- What if you try to break the app on purpose? +- What if multiple people use it at once? +- What if you use it on mobile/tablet? +- What if you refresh/navigate while it's working? +- What if you paste invalid inputs? +- What if you upload HUGE files? +- What if you leave it running overnight? + +Don't worry about being technical - just describe what you saw break as a user. + +Format each issue like: + +ISSUE #1: [Brief angry user description] +- STEPS TO BREAK IT: [Exactly what you did] +- WHAT HAPPENED: [What you saw] +- ANGER LEVEL: [1-10] +- EXPECTED: [What should happen] + +Keep going until you've found every possible way to break this app from a user's perspective! + +After outputting the list, produce an optimized Composer edit block based on it to fix the severe issues that are worth adjusting, given Gradio limitations and the current usage target (do not assume we need unnecessary functions).",[],[],"[{'reaction': '👀', 'users': ['John6666', 'xi0v', 'AtAndDev', 'nroggendorff'], 'count': 4}, {'reaction': '😎', 'users': ['TouchNight', 'AtAndDev', 'mlmPenguin'], 'count': 3}, {'reaction': '👍', 'users': ['samihalawa', 'Nymbo'], 'count': 2}]",2024-11-11 17:27:35,2024-11-11 17:27:35.284,[],/posts/samihalawa/444428540739993,2325,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png,2266.0,Tom Aarsen,tomaarsen,232920597638334,"[{'type': 'text', 'value': ""I just released Sentence Transformers v3.3.0 & it's huge! 4.5x speedup for CPU with OpenVINO int8 static quantization, training with prompts for a free perf. boost, PEFT integration, evaluation on NanoBEIR, and more! Details:"", 'raw': ""I just released Sentence Transformers v3.3.0 & it's huge! 4.5x speedup for CPU with OpenVINO int8 static quantization, training with prompts for a free perf. boost, PEFT integration, evaluation on NanoBEIR, and more! Details:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""1. We integrate Post-Training Static Quantization using OpenVINO, a very efficient solution for CPUs that processes 4.78x as many texts per second on average, while only hurting performance by 0.36% on average. There's a new "", 'raw': ""1. We integrate Post-Training Static Quantization using OpenVINO, a very efficient solution for CPUs that processes 4.78x as many texts per second on average, while only hurting performance by 0.36% on average. 
There's a new ""}, {'type': 'inline_code', 'code': 'export_static_quantized_openvino_model', 'raw': '`export_static_quantized_openvino_model`'}, {'type': 'text', 'value': ' method to quantize a model.', 'raw': ' method to quantize a model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. We add the option to train with prompts, e.g. strings like ""query: "", ""search_document: "" or ""Represent this sentence for searching relevant passages: "". It\'s as simple as using the ', 'raw': '2. We add the option to train with prompts, e.g. strings like ""query: "", ""search_document: "" or ""Represent this sentence for searching relevant passages: "". It\'s as simple as using the '}, {'type': 'inline_code', 'code': 'prompts', 'raw': '`prompts`'}, {'type': 'text', 'value': ' argument in ', 'raw': ' argument in '}, {'type': 'inline_code', 'code': 'SentenceTransformerTrainingArguments', 'raw': '`SentenceTransformerTrainingArguments`'}, {'type': 'text', 'value': '. Our experiments show that you can easily reach 0.66% to 0.90% relative performance improvement on NDCG@10 at no extra cost by adding ""query: "" before each training query and ""document: "" before each training answer.', 'raw': '. Our experiments show that you can easily reach 0.66% to 0.90% relative performance improvement on NDCG@10 at no extra cost by adding ""query: "" before each training query and ""document: "" before each training answer.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""3. Sentence Transformers now supports training PEFT adapters via 7 new methods for adding new adapters or loading pre-trained ones. You can also directly load a trained adapter with SentenceTransformer as if it's a normal model. Very useful for e.g. 1) training multiple adapters on 1 base model, 2) training bigger models than otherwise possible, or 3) cheaply hosting multiple models by switching multiple adapters on 1 base model."", 'raw': ""3. Sentence Transformers now supports training PEFT adapters via 7 new methods for adding new adapters or loading pre-trained ones. You can also directly load a trained adapter with SentenceTransformer as if it's a normal model. Very useful for e.g. 1) training multiple adapters on 1 base model, 2) training bigger models than otherwise possible, or 3) cheaply hosting multiple models by switching multiple adapters on 1 base model.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""4. We added easy evaluation on NanoBEIR, a subset of BEIR a.k.a. the MTEB Retrieval benchmark. It contains 13 datasets with 50 queries and up to 10k documents each. Evaluation is fast, and can easily be done during training to track your model's performance on general-purpose information retrieval tasks."", 'raw': ""4. We added easy evaluation on NanoBEIR, a subset of BEIR a.k.a. the MTEB Retrieval benchmark. It contains 13 datasets with 50 queries and up to 10k documents each. Evaluation is fast, and can easily be done during training to track your model's performance on general-purpose information retrieval tasks.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Additionally, we also deprecate Python 3.8, add better compatibility with Transformers v4.46.0, and more. Read the full release notes here: ', 'raw': 'Additionally, we also deprecate Python 3.8, add better compatibility with Transformers v4.46.0, and more. 
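To make the quantization step concrete, here is a sketch of the OpenVINO int8 static quantization flow from point 1. The import path, the OVQuantizationConfig defaults, and the exact signature of export_static_quantized_openvino_model are assumptions based on a reading of the release notes, so verify them against the documentation:

```python
# Assumes: sentence-transformers >= 3.3.0 with OpenVINO extras, plus optimum-intel installed.
from sentence_transformers import SentenceTransformer, export_static_quantized_openvino_model
from optimum.intel import OVQuantizationConfig

# Load the model on the OpenVINO backend, then export an int8 statically quantized copy.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2", backend="openvino")
quantization_config = OVQuantizationConfig()  # default settings are an assumption here
export_static_quantized_openvino_model(
    model,
    quantization_config,
    "all-MiniLM-L6-v2",  # directory (or repo name) where the quantized model is written
)
```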
Read the full release notes here: '}, {'type': 'link', 'href': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v3.3.0', 'raw': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v3.3.0'}]","I just released Sentence Transformers v3.3.0 & it's huge! 4.5x speedup for CPU with OpenVINO int8 static quantization, training with prompts for a free perf. boost, PEFT integration, evaluation on NanoBEIR, and more! Details: + +1. We integrate Post-Training Static Quantization using OpenVINO, a very efficient solution for CPUs that processes 4.78x as many texts per second on average, while only hurting performance by 0.36% on average. There's a new `export_static_quantized_openvino_model` method to quantize a model. + +2. We add the option to train with prompts, e.g. strings like ""query: "", ""search_document: "" or ""Represent this sentence for searching relevant passages: "". It's as simple as using the `prompts` argument in `SentenceTransformerTrainingArguments`. Our experiments show that you can easily reach 0.66% to 0.90% relative performance improvement on NDCG@10 at no extra cost by adding ""query: "" before each training query and ""document: "" before each training answer. + +3. Sentence Transformers now supports training PEFT adapters via 7 new methods for adding new adapters or loading pre-trained ones. You can also directly load a trained adapter with SentenceTransformer as if it's a normal model. Very useful for e.g. 1) training multiple adapters on 1 base model, 2) training bigger models than otherwise possible, or 3) cheaply hosting multiple models by switching multiple adapters on 1 base model. + +4. We added easy evaluation on NanoBEIR, a subset of BEIR a.k.a. the MTEB Retrieval benchmark. It contains 13 datasets with 50 queries and up to 10k documents each. Evaluation is fast, and can easily be done during training to track your model's performance on general-purpose information retrieval tasks. + +Additionally, we also deprecate Python 3.8, add better compatibility with Transformers v4.46.0, and more. 
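A compact sketch combining points 2 and 4 from the post, training with prompts and a quick NanoBEIR check. The base model, dataset, prompt strings, and NanoBEIR subset names are illustrative assumptions; the prompts argument and NanoBEIREvaluator are the features the release names:

```python
from datasets import load_dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer
from sentence_transformers.training_args import SentenceTransformerTrainingArguments
from sentence_transformers.losses import MultipleNegativesRankingLoss
from sentence_transformers.evaluation import NanoBEIREvaluator

model = SentenceTransformer("microsoft/mpnet-base")  # illustrative base model
train_dataset = load_dataset("sentence-transformers/natural-questions", split="train")

args = SentenceTransformerTrainingArguments(
    output_dir="models/mpnet-base-nq-prompts",
    # Prepend a prompt per training column; the strings mirror the post's examples.
    prompts={"query": "query: ", "answer": "document: "},
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train_dataset,
    loss=MultipleNegativesRankingLoss(model),
)
trainer.train()

# Fast retrieval check on two NanoBEIR subsets (subset names are an assumption).
evaluator = NanoBEIREvaluator(dataset_names=["msmarco", "nfcorpus"])
print(evaluator(model))
```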
Read the full release notes here: https://github.com/UKPLab/sentence-transformers/releases/tag/v3.3.0","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/4xE8txYbHbzViuTgbujdQ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/nk4Dnsz-TdImqy5F_pe-N.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/bu8ozWM9-O0d5d1KOi80H.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/jgOvDirvliwNenv8fX-kf.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/SXRbsjVkOyuK0IYaF5F6w.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/gKY7Y5w0ZL8cf_IKjnUwE.png'}]",[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'John6666', 'iojvsuynv', 'Mdubbya', 'eriknovak', 'den0620', 'DmitryRyumin', 'csabakecskemeti', 'andreagemelli', 'praga95', 'abdouaziiz', 'splevine', 'bayang', 'ai-everyday', 'ArthurGprog'], 'count': 15}, {'reaction': '👍', 'users': ['mlabonne', 'm36', 'abdouaziiz', 'souze8', 'sebasmos'], 'count': 5}]",2024-11-11 12:24:17,2024-11-11 12:24:33.295,[],/posts/tomaarsen/232920597638334,5848,,0 +/avatars/7be1913712fdd1ffe75967ed19007720.svg,16.0,stock mining,automatedstockminingorg,830550648952715,"[{'type': 'text', 'value': ""Hi everyone, I have been trying to give my chatbots access to the web for a long while now. I have tried using the Google Search API, taking the links and then scraping them, but it doesn't work that well. Does anyone know how you can give a chatbot access to Google/the web, so that it has access to current data?""}]","Hi everyone, I have been trying to give my chatbots access to the web for a long while now. I have tried using the Google Search API, taking the links and then scraping them, but it doesn't work that well. 
Does anyone know how you can give a chatbot access to Google/the web, so that it has access to current data?",[],[],"[{'reaction': '👀', 'users': ['John6666', 'devenbiz4group', 'SOHAIBK1', 'ai-everyday'], 'count': 4}]",2024-11-11 06:55:54,2024-11-12 22:46:51.468,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '66edaf0ae67e0b3a0f42aaba', 'avatarUrl': '/avatars/7be1913712fdd1ffe75967ed19007720.svg', 'fullname': 'stock mining', 'name': 'automatedstockminingorg', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 16, 'isFollowing': False}, {'_id': '630d5f5954c3dbd48059e614', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/630d5f5954c3dbd48059e614/x-vcRnBKOFuVjSSWQeuIB.png', 'fullname': 'Blake Donahoo', 'name': 'deathstarenterprise', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/automatedstockminingorg/830550648952715,2668,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,391224485870515,"[{'type': 'text', 'value': 'GRID-6X : Layout for Seamless Image Assembly 🔥', 'raw': 'GRID-6X : Layout for Seamless Image Assembly 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🪨Demo: ', 'raw': '🪨Demo: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/prithivMLmods/GRID-6X', 'raw': 'https://huggingface.co/spaces/prithivMLmods/GRID-6X'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🪨Doc / Blog: ', 'raw': '🪨Doc / Blog: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/prithivMLmods/grid-6x', 'raw': 'https://huggingface.co/blog/prithivMLmods/grid-6x'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the ', 'raw': 'In the '}, {'type': 'inline_code', 'code': 'infer', 'raw': '`infer`'}, {'type': 'text', 'value': ' function:', 'raw': ' function:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'python', 'code': ""grid_img = Image.new('RGB', (width * grid_size_x, height * grid_size_y))\nfor i, img in enumerate(result.images[:num_images]):\n grid_img.paste(img, (i % grid_size_x * width, i // grid_size_x * height))"", 'raw': ""```python\ngrid_img = Image.new('RGB', (width * grid_size_x, height * grid_size_y))\nfor i, img in enumerate(result.images[:num_images]):\n grid_img.paste(img, (i % grid_size_x * width, i // grid_size_x * height))\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. **Image Initialization**: ', 'raw': '1. **Image Initialization**: '}, {'type': 'inline_code', 'code': 'grid_img', 'raw': '`grid_img`'}, {'type': 'text', 'value': ' is a blank canvas that will hold the images in a grid format.', 'raw': ' is a blank canvas that will hold the images in a grid format.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. **Image Placement**: Images are pasted onto the canvas using a loop:', 'raw': '2. 
**Image Placement**: Images are pasted onto the canvas using a loop:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **Horizontal Position**: ', 'raw': ' - **Horizontal Position**: '}, {'type': 'inline_code', 'code': '(i % grid_size_x) * width', 'raw': '`(i % grid_size_x) * width`'}, {'type': 'text', 'value': ' calculates the x-coordinate.', 'raw': ' calculates the x-coordinate.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **Vertical Position**: ', 'raw': ' - **Vertical Position**: '}, {'type': 'inline_code', 'code': '(i // grid_size_x) * height', 'raw': '`(i // grid_size_x) * height`'}, {'type': 'text', 'value': ' calculates the y-coordinate.', 'raw': ' calculates the y-coordinate.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. **Grid Size Selection**: The user selects the grid size from options like ""2x1"", ""1x2"", ""2x2"", ""2x3"", ""3x2"", and ""1x1"". Each option corresponds to the arrangement of images:', 'raw': '1. **Grid Size Selection**: The user selects the grid size from options like ""2x1"", ""1x2"", ""2x2"", ""2x3"", ""3x2"", and ""1x1"". Each option corresponds to the arrangement of images:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **2x1**: 2 images in a single row', 'raw': ' - **2x1**: 2 images in a single row'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **1x2**: 1 image in two rows (column layout)', 'raw': ' - **1x2**: 1 image in two rows (column layout)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **2x2**: 2 rows with 2 images each', 'raw': ' - **2x2**: 2 rows with 2 images each'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **2x3**: 2 rows with 3 images each', 'raw': ' - **2x3**: 2 rows with 3 images each'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **3x2**: 3 rows with 2 images each', 'raw': ' - **3x2**: 3 rows with 2 images each'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - **1x1**: A single image (default)', 'raw': ' - **1x1**: A single image (default)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. **Image Generation**: Based on the grid size selection, the app calculates the number of images to generate. For example:', 'raw': '2. **Image Generation**: Based on the grid size selection, the app calculates the number of images to generate. 
For example:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - If the grid size is ""2x2"", the app generates 4 images.', 'raw': ' - If the grid size is ""2x2"", the app generates 4 images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - For ""3x2"", it generates 6 images.', 'raw': ' - For ""3x2"", it generates 6 images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '-> Each option arranges images accordingly, providing flexibility in viewing multiple images in one output.', 'raw': '-> Each option arranges images accordingly, providing flexibility in viewing multiple images in one output.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '-> Added both of these spaces that support the GRID functionality Layout for Seamless Image Assembly : ', 'raw': '-> Added both of these spaces that support the GRID functionality Layout for Seamless Image Assembly : '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '----------', 'raw': '----------'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥IMAGINEO-4K: ', 'raw': '🔥IMAGINEO-4K: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K', 'raw': 'https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥GRID-6X: ', 'raw': '🔥GRID-6X: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/prithivMLmods/GRID-6X', 'raw': 'https://huggingface.co/spaces/prithivMLmods/GRID-6X'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '----------', 'raw': '----------'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.@prithivMLmods 🤗', 'raw': '.@prithivMLmods 🤗'}]","GRID-6X : Layout for Seamless Image Assembly 🔥 + +🪨Demo: https://huggingface.co/spaces/prithivMLmods/GRID-6X +🪨Doc / Blog: https://huggingface.co/blog/prithivMLmods/grid-6x + +In the `infer` function: +```python +grid_img = Image.new('RGB', (width * grid_size_x, height * grid_size_y)) +for i, img in enumerate(result.images[:num_images]): + grid_img.paste(img, (i % grid_size_x * width, i // grid_size_x * height)) +``` +1. **Image Initialization**: `grid_img` is a blank canvas that will hold the images in a grid format. +2. **Image Placement**: Images are pasted onto the canvas using a loop: + - **Horizontal Position**: `(i % grid_size_x) * width` calculates the x-coordinate. + - **Vertical Position**: `(i // grid_size_x) * height` calculates the y-coordinate. + +1. **Grid Size Selection**: The user selects the grid size from options like ""2x1"", ""1x2"", ""2x2"", ""2x3"", ""3x2"", and ""1x1"". Each option corresponds to the arrangement of images: + - **2x1**: 2 images in a single row + - **1x2**: 1 image in two rows (column layout) + - **2x2**: 2 rows with 2 images each + - **2x3**: 2 rows with 3 images each + - **3x2**: 3 rows with 2 images each + - **1x1**: A single image (default) + +2. **Image Generation**: Based on the grid size selection, the app calculates the number of images to generate. For example: + - If the grid size is ""2x2"", the app generates 4 images. 
+ - For ""3x2"", it generates 6 images. + +-> Each option arranges images accordingly, providing flexibility in viewing multiple images in one output. + +-> Added both of these spaces that support the GRID functionality Layout for Seamless Image Assembly : + +---------- +🔥IMAGINEO-4K: https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K + +🔥GRID-6X: https://huggingface.co/spaces/prithivMLmods/GRID-6X +---------- +. +. +.@prithivMLmods 🤗","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/EqV9AAkxkczs083HRrS9a.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/8D3JFzobHsM5d7MDvI1eM.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/2hMMo763UIrjElHrVHq9B.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/z9vZQwdEMXWyiC4HhE1w5.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/yTI4MWizFc0E04kNo7XnU.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/uaqIIbkW3XZufHbuY9B4s.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/UZq_AZZbQ1OSyHqOWrMlo.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/wJjpXujFjKeDWKocbQ6vW.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/ipspwn9DRNH6dS4IInhC2.webp'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'Danis146'], 'count': 2}, {'reaction': '🔥', 'users': ['civet789', 'prithivMLmods'], 'count': 2}, {'reaction': '❤️', 'users': ['civet789'], 'count': 1}, {'reaction': '😎', 'users': ['Vexia'], 'count': 1}, {'reaction': '👍', 'users': ['Kasnol'], 'count': 1}]",2024-11-11 05:55:50,2024-11-11 08:05:19.661,[],/posts/prithivMLmods/391224485870515,4049,,0 +/avatars/f32291df2054c1bb4a01889d1b41c0d5.svg,23.0,Christopher Schröder,cschroeder,684381844303681,"[{'type': 'text', 'value': '#EMNLP2024 is happening soon! Unfortunately, I will not be on site, but I will present our poster virtually on Wednesday, Nov 13 (7:45 EST / 13:45 CEST) in Virtual Poster Session 2.', 'raw': '#EMNLP2024 is happening soon! Unfortunately, I will not be on site, but I will present our poster virtually on Wednesday, Nov 13 (7:45 EST / 13:45 CEST) in Virtual Poster Session 2.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this work, we leverage self-training in an active learning loop in order to train small language models with even less data. Hope to see you there!', 'raw': 'In this work, we leverage self-training in an active learning loop in order to train small language models with even less data. Hope to see you there!'}, {'type': 'new_line', 'raw': '\n'}]","#EMNLP2024 is happening soon! Unfortunately, I will not be on site, but I will present our poster virtually on Wednesday, Nov 13 (7:45 EST / 13:45 CEST) in Virtual Poster Session 2. + +In this work, we leverage self-training in an active learning loop in order to train small language models with even less data. Hope to see you there! 
+
","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/625026749d39e8be3166132f/wnzWVPQYkfpaANZ1AOqig.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-10 22:49:02,2024-11-10 22:53:54.315,"[{'_id': '625026749d39e8be3166132f', 'avatarUrl': '/avatars/f32291df2054c1bb4a01889d1b41c0d5.svg', 'fullname': 'Christopher Schröder', 'name': 'cschroeder', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23, 'isFollowing': False}]",/posts/cschroeder/684381844303681,703,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/672e54a15bd9344d447932b8/iuCOjkLJyETdg7abe_NQt.jpeg,,Jonathan Payton,SaRaHAI2024,555944473411068,"[{'type': 'text', 'value': 'Hello everyone!!! I am new to this and a little out of my depth (a LOT out of my depth!! LOL!) I am looking through the site and wanted to ask if there were any quality primers I should read, or a good basic getting-started post?', 'raw': 'Hello everyone!!! I am new to this and a little out of my depth (a LOT out of my depth!! LOL!) I am looking through the site and wanted to ask if there were any quality primers I should read, or a good basic getting-started post?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks in advance!!', 'raw': 'Thanks in advance!!'}]","Hello everyone!!! I am new to this and a little out of my depth (a LOT out of my depth!! LOL!) I am looking through the site and wanted to ask if there were any quality primers I should read, or a good basic getting-started post? + +Thanks in advance!!",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-10 13:35:05,2024-11-12 22:54:51.149,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '6593502ca2607099284523db', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6593502ca2607099284523db/svxu-iTvwsgmiYNgaFGIN.png', 'fullname': 'William J. 
Marshall', 'name': 'fuzzy-mittenz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 44, 'isFollowing': False}, {'_id': '630d5f5954c3dbd48059e614', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/630d5f5954c3dbd48059e614/x-vcRnBKOFuVjSSWQeuIB.png', 'fullname': 'Blake Donahoo', 'name': 'deathstarenterprise', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/SaRaHAI2024/555944473411068,843,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,788696446784520,"[{'type': 'text', 'value': 'New Style, New Mix, New Drop 🧤', 'raw': 'New Style, New Mix, New Drop 🧤'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧨Flux LoRA DLC: ', 'raw': '🧨Flux LoRA DLC: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/FLUX-LoRA-DLC'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC', 'raw': 'https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎆Glowing-Body: ', 'raw': '🎆Glowing-Body: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Glowing-Body-Flux-LoRA'}, 'url': 'https://huggingface.co/prithivMLmods/Glowing-Body-Flux-LoRA', 'raw': 'https://huggingface.co/prithivMLmods/Glowing-Body-Flux-LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎆Electric-Blue: ', 'raw': '🎆Electric-Blue: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Electric-Blue-Flux-LoRA'}, 'url': 'https://huggingface.co/prithivMLmods/Electric-Blue-Flux-LoRA', 'raw': 'https://huggingface.co/prithivMLmods/Electric-Blue-Flux-LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎆Intense-Red: ', 'raw': '🎆Intense-Red: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Intense-Red-Flux-LoRA'}, 'url': 'https://huggingface.co/prithivMLmods/Intense-Red-Flux-LoRA', 'raw': 'https://huggingface.co/prithivMLmods/Intense-Red-Flux-LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎆Clouds-Illusion: ', 'raw': '🎆Clouds-Illusion: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Clouds-Illusion-Flux-LoRA'}, 'url': 'https://huggingface.co/prithivMLmods/Clouds-Illusion-Flux-LoRA', 'raw': 'https://huggingface.co/prithivMLmods/Clouds-Illusion-Flux-LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎆Digital-Yellow: ', 'raw': '🎆Digital-Yellow: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Digital-Yellow-Flux-LoRA'}, 'url': 'https://huggingface.co/prithivMLmods/Digital-Yellow-Flux-LoRA', 'raw': 'https://huggingface.co/prithivMLmods/Digital-Yellow-Flux-LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧨Flux LoRA Collection: ', 'raw': '🧨Flux LoRA Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be'}, 'url': 
'https://huggingface.co/collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be', 'raw': 'https://huggingface.co/collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'prithivMLmods', 'raw': '@prithivMLmods'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","New Style, New Mix, New Drop 🧤 + +🧨Flux LoRA DLC: https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC + +🎆Glowing-Body: https://huggingface.co/prithivMLmods/Glowing-Body-Flux-LoRA +🎆Electric-Blue: https://huggingface.co/prithivMLmods/Electric-Blue-Flux-LoRA +🎆Intense-Red: https://huggingface.co/prithivMLmods/Intense-Red-Flux-LoRA +🎆Clouds-Illusion: https://huggingface.co/prithivMLmods/Clouds-Illusion-Flux-LoRA +🎆Digital-Yellow: https://huggingface.co/prithivMLmods/Digital-Yellow-Flux-LoRA + +🧨Flux LoRA Collection: https://huggingface.co/collections/prithivMLmods/flux-lora-collections-66dd5908be2206cfaa8519be + +. +. +. +@prithivMLmods +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/VifKrKv_kxDWXE1dZLr6X.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/tksVqqDwdOz9tRyxLfdf3.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/w7duGI8p1Fg0WiNtxSIKy.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/LJX2q35mDOMvUa0BX5wsk.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/A2-omcVa3AhvGsyofrVka.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/6Fq-_PrfYRVny6acr6OQE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/Gg-8P9vHPW245kiQwdEri.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/vR5_M0P_C28KSlVjrDmIJ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/9mKOiMhK3oHaQR2kQFWyz.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/DsvuWyS_hH3kV4zfUSPWY.png'}]","[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957}]","[{'reaction': '❤️', 'users': ['Tonic', 'realaliarain', 'speedchemistry', 'JayNagose', 'prithivMLmods', 'multimodalart', 'radames', 'd8rt8v', 'Hamed744', 'faruqhrp', 's3nh', 'RGS199'], 'count': 12}, {'reaction': '👍', 'users': ['John6666', 'speedchemistry', 'Markjr', 'multimodalart', 'Rsln', 'prithivMLmods', 's3nh'], 'count': 7}, {'reaction': '🔥', 'users': ['jovialjoel', 'kimp-dev-ninja', 'zaidzameer010', 
'prithivMLmods', 's3nh'], 'count': 5}, {'reaction': '👀', 'users': ['Tonic', 'multimodalart', 'prithivMLmods'], 'count': 3}, {'reaction': '🤝', 'users': ['prithivMLmods', 'louisbrulenaudet'], 'count': 2}, {'reaction': '🤗', 'users': ['prithivMLmods'], 'count': 1}]",2024-11-04 10:35:40,2024-11-05 07:56:16.114,[],/posts/prithivMLmods/788696446784520,5998,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,301983274684168,"[{'type': 'text', 'value': 'Vector Search (most) datasets on the Hugging Face Hub 🔦', 'raw': 'Vector Search (most) datasets on the Hugging Face Hub 🔦'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Powered by: Polars, DuckDB, Gradio and model2vec (lightning-fast embeddings by Stéphan Tulkens).', 'raw': 'Powered by: Polars, DuckDB, Gradio and model2vec (lightning-fast embeddings by Stéphan Tulkens).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Should work fast enough for datasets up to 100K.', 'raw': 'Should work fast enough for datasets up to 100K.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'davidberenstein1957/vectorsearch-hub-datasets'}, 'url': 'https://huggingface.co/spaces/davidberenstein1957/vectorsearch-hub-datasets', 'raw': 'https://huggingface.co/spaces/davidberenstein1957/vectorsearch-hub-datasets'}]","Vector Search (most) datasets on the Hugging Face Hub 🔦 + +Powered by: Polars, DuckDB, Gradio and model2vec (lightning-fast embeddings by Stéphan Tulkens). + +Should work fast enough for datasets up to 100K. + +https://huggingface.co/spaces/davidberenstein1957/vectorsearch-hub-datasets",[],[],"[{'reaction': '🤗', 'users': ['davidberenstein1957', 'Tonic', 'prithivMLmods', 'xi0v', 'Nymbo', 'djuna', 'thanhkt'], 'count': 7}, {'reaction': '🚀', 'users': ['davidberenstein1957', 'Tonic', 'xi0v', 'Nymbo', 'GoDjMike'], 'count': 5}, {'reaction': '👀', 'users': ['davidberenstein1957', 'John6666', 'Tonic', 'xi0v', 'Nymbo'], 'count': 5}]",2024-11-04 10:19:03,2024-11-04 10:19:03.151,[],/posts/davidberenstein1957/301983274684168,3104,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1635314457124-5f32b2367e583543386214d9.jpeg,26.0,Sergei Averkiev,averoo,419590652134553,"[{'type': 'text', 'value': 'Hello researchers! Here are scripts to generate reviews on HF Daily Papers:', 'raw': 'Hello researchers! 
Here are scripts to generate reviews on HF Daily Papers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://github.com/averkij/top_papers', 'raw': 'https://github.com/averkij/top_papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙️ Works on GitHub Actions', 'raw': '⚙️ Works on GitHub Actions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 Claude, GPT-4o, FLUX', 'raw': '🤖 Claude, GPT-4o, FLUX'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌏 Multiple languages', 'raw': '🌏 Multiple languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 Classification by 38 topics (#agents, #multimodal, #plp, etc.)', 'raw': '📚 Classification by 38 topics (#agents, #multimodal, #plp, etc.)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔺 ', 'raw': '🔺 '}, {'type': 'link', 'href': 'https://HFday.ru', 'raw': 'https://HFday.ru'}]","Hello researchers! Here are scripts to generate reviews on HF Daily Papers: + +👉 https://github.com/averkij/top_papers + +⚙️ Works on GitHub Actions +🤖 Claude, GPT-4o, FLUX +🌏 Multiple languages +📚 Classification by 38 topics (#agents, #multimodal, #plp, etc.) +🔺 https://HFday.ru","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f32b2367e583543386214d9/eNrWiDtpRGgBk-aRBJCGX.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f32b2367e583543386214d9/-kPHSc3clhpzSvRTp_1PA.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-04 10:07:24,2024-11-04 10:08:40.111,[],/posts/averoo/419590652134553,484,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,808388125499602,"[{'type': 'text', 'value': ""It's work like this that in some way signals the eventual “dominance” of AI over all the sciences."", 'raw': ""It's work like this that in some way signals the eventual “dominance” of AI over all the sciences.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '“We train our model on the six-dimensional N-body phase space, predicting particle velocities as the time derivative of the model’s displacement outputs”', 'raw': '“We train our model on the six-dimensional N-body phase space, predicting particle velocities as the time derivative of the model’s displacement outputs”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The emulator is capable of predicting', 'raw': 'The emulator is capable of predicting'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'the nonlinear displacement and velocity fields for 128^3 particles in half a second on a single GPU🤯', 'raw': 'the nonlinear displacement and velocity fields for 128^3 particles in half a second on a single GPU🤯'}]","It's work like this that in some way signals the eventual “dominance” of AI over all the sciences. 
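The quote that follows, predicting velocities as the time derivative of the displacement outputs, can be illustrated with a toy finite-difference stand-in. This only sketches the idea; the actual emulator presumably differentiates through the model, and every number below is made up:

```python
import numpy as np

rng = np.random.default_rng(0)
n_particles = 64**3          # small stand-in for the 128^3 grid in the post
dt = 1e-3                    # illustrative time step

# Hypothetical emulator outputs: displacement fields at t and t + dt, shape (N, 3).
disp_t = rng.normal(size=(n_particles, 3))
disp_t_dt = disp_t + dt * rng.normal(size=(n_particles, 3))

# Velocity as the (finite-difference) time derivative of displacement.
velocity = (disp_t_dt - disp_t) / dt
print(velocity.shape)  # (262144, 3)
```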
+
 +“We train our model on the six-dimensional N-body phase space, predicting particle velocities as the time derivative of the model’s displacement outputs” + +The emulator is capable of predicting +the nonlinear displacement and velocity fields for 128^3 particles in half a second on a single GPU🤯","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/yxh8K8-a8AR8Wyl0SYG4y.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/s7puQYqsEnc0SkSzs-FX7.qt'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/hRn-JwTMt1UB1uem_n_XJ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/HghFqpW0eMAq8Y5HkiUyt.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['mediiiiii3', 'ajibawa-2023', 'John6666', 'Chief-Inspector', 'ai-everyday'], 'count': 5}]",2024-11-04 07:12:47,2024-11-05 07:42:43.778,"[{'_id': '636f3c69b0ebc048881db378', 'avatarUrl': '/avatars/8aaab676f66023255d397ba82b4bcb6e.svg', 'fullname': 'James Hunter Carter', 'name': 'jameshuntercarter', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}]",/posts/Jaward/808388125499602,2118,,1 +/avatars/fb866e3758189d70488fc6a879151f45.svg,21.0,Akihito Miyazaki,Akjava,354334873318056,"[{'type': 'text', 'value': ""I've released several new Hugging Face Spaces. "", 'raw': ""I've released several new Hugging Face Spaces. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My primary objective is to create consistent character facial animation using image-to-image techniques:', 'raw': 'My primary objective is to create consistent character facial animation using image-to-image techniques:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/CreateConsistentCharacterFacialAnimationWithImg2Img'}, 'url': 'https://huggingface.co/spaces/Akjava/CreateConsistentCharacterFacialAnimationWithImg2Img', 'raw': 'https://huggingface.co/spaces/Akjava/CreateConsistentCharacterFacialAnimationWithImg2Img'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A short-term goal is to create a simple talking-head animation.', 'raw': 'A short-term goal is to create a simple talking-head animation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'WebP-3-Frame-Talking-Animation', 'raw': 'WebP-3-Frame-Talking-Animation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/AIDiagramChatWithVoice-FaceCharacter'}, 'url': 'https://huggingface.co/spaces/Akjava/AIDiagramChatWithVoice-FaceCharacter', 'raw': 'https://huggingface.co/spaces/Akjava/AIDiagramChatWithVoice-FaceCharacter'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[Space]', 'raw': '[Space]'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- GPU tools', 'raw': '- GPU tools'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Flux1-schnell img2img', 'raw': 'Flux1-schnell img2img'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/flux1-schnell-img2img'}, 
'url': 'https://huggingface.co/spaces/Akjava/flux1-schnell-img2img', 'raw': 'https://huggingface.co/spaces/Akjava/flux1-schnell-img2img'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Flux1-schnell Inpaint with mask-file', 'raw': 'Flux1-schnell Inpaint with mask-file'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Akjava/flux1-schnell-img2img'}, 'url': 'https://huggingface.co/spaces/Akjava/flux1-schnell-img2img', 'raw': 'https://huggingface.co/spaces/Akjava/flux1-schnell-img2img'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Tiny CPU tools', 'raw': ' - Tiny CPU tools'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'WebP-3F-TH - create webp animation from 3 images', 'raw': 'WebP-3F-TH - create webp animation from 3 images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'OpenCV-Inapint - classic inpaint', 'raw': 'OpenCV-Inapint - classic inpaint'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Whitebalance - simple white balance', 'raw': 'Whitebalance - simple white balance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paste Image - just paste image with mask', 'raw': 'Paste Image - just paste image with mask'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'WebP Resize Convert - resize and convert webp-animation ', 'raw': 'WebP Resize Convert - resize and convert webp-animation '}, {'type': 'new_line', 'raw': '\n'}]","I've released several new Hugging Face Spaces. + +My primary objective is to create consistent character facial animation using image-to-image techniques: + +https://huggingface.co/spaces/Akjava/CreateConsistentCharacterFacialAnimationWithImg2Img + +A short-term goal is to create a simple talking-head animation. 
+ +WebP-3-Frame-Talking-Animation +https://huggingface.co/spaces/Akjava/AIDiagramChatWithVoice-FaceCharacter + +[Space] + +- GPU tools +Flux1-schnell img2img +https://huggingface.co/spaces/Akjava/flux1-schnell-img2img + +Flux1-schnell Inpaint with mask-file +https://huggingface.co/spaces/Akjava/flux1-schnell-img2img + + - Tiny CPU tools +WebP-3F-TH - create webp animation from 3 images +OpenCV-Inapint - classic inpaint +Whitebalance - simple white balance +Paste Image - just paste image with mask +WebP Resize Convert - resize and convert webp-animation +",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-03 23:47:03,2024-11-03 23:47:03.131,[],/posts/Akjava/354334873318056,708,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61c396584b91d658673447d0/UDexWgm4v9jnvxAeau3E3.jpeg,66.0,Mariusz Kurman,mkurman,153695739796256,"[{'type': 'text', 'value': 'We are happy to introduce MedIT SUN 1B, a downscaled version of the MedIT SUN 2.5B Llama 3.2 variant.', 'raw': 'We are happy to introduce MedIT SUN 1B, a downscaled version of the MedIT SUN 2.5B Llama 3.2 variant.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Give it a try!', 'raw': 'Give it a try!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'meditsolutions/Llama-3.2-SUN-1B-chat'}, 'url': 'https://huggingface.co/meditsolutions/Llama-3.2-SUN-1B-chat', 'raw': 'https://huggingface.co/meditsolutions/Llama-3.2-SUN-1B-chat'}]","We are happy to introduce MedIT SUN 1B, a downscaled version of the MedIT SUN 2.5B Llama 3.2 variant. + +Give it a try! +https://huggingface.co/meditsolutions/Llama-3.2-SUN-1B-chat",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-03 20:34:28,2024-11-03 20:34:37.873,[],/posts/mkurman/153695739796256,716,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,353004317978723,"[{'type': 'text', 'value': 'Do you guys want to see my training code for ', 'raw': 'Do you guys want to see my training code for '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nroggendorff/smallama'}, 'url': 'https://huggingface.co/nroggendorff/smallama', 'raw': 'https://huggingface.co/nroggendorff/smallama'}, {'type': 'text', 'value': ' ?', 'raw': ' ?'}]",Do you guys want to see my training code for https://huggingface.co/nroggendorff/smallama ?,[],[],"[{'reaction': '😎', 'users': ['John6666'], 'count': 1}]",2024-11-03 20:04:09,2024-11-04 13:54:09.503,"[{'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '65d883893a52cd9bcd8ab7cf', 'avatarUrl': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg', 'fullname': 'leroy Samuel Dyer', 'name': 'LeroyDyer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': False}]",/posts/nroggendorff/353004317978723,636,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png,159.0,Richard A Aragon,TuringsSolutions,941309448600578,"[{'type': 'text', 'value': 'Imagine being able to talk directly to your API connection. ""I have a field in the CRM named Customer_ID that needs to map to a field in the ERP named ERP_Customer_ID."" Imagine being able to give your API connections both a brain and swarm of agents as a body to execute any task or function. This isn\'t science fiction, this is the revolutionary power of Liquid API. A product 10 years in the making!', 'raw': 'Imagine being able to talk directly to your API connection. ""I have a field in the CRM named Customer_ID that needs to map to a field in the ERP named ERP_Customer_ID."" Imagine being able to give your API connections both a brain and swarm of agents as a body to execute any task or function. This isn\'t science fiction, this is the revolutionary power of Liquid API. A product 10 years in the making!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/cHI_k1Dkdr4', 'raw': 'https://youtu.be/cHI_k1Dkdr4'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","Imagine being able to talk directly to your API connection. ""I have a field in the CRM named Customer_ID that needs to map to a field in the ERP named ERP_Customer_ID."" Imagine being able to give your API connections both a brain and swarm of agents as a body to execute any task or function. This isn't science fiction, this is the revolutionary power of Liquid API. A product 10 years in the making! 
+ +https://youtu.be/cHI_k1Dkdr4 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64274b69ba6cef0a6ebb0fd6/6FvfD9bfR9oHm10LvOgRH.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'HamedEmine'], 'count': 2}]",2024-11-03 18:50:51,2024-11-04 18:20:45.252,"[{'_id': '62878fdc70af5d9106e3e892', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1653051419389-62878fdc70af5d9106e3e892.png', 'fullname': 'K S', 'name': 'MultiTrickFox', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}, {'_id': '64274b69ba6cef0a6ebb0fd6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png', 'fullname': 'Richard A Aragon', 'name': 'TuringsSolutions', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}]",/posts/TuringsSolutions/941309448600578,585,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/637251142f98dcc049b349de/kkRLjyaO55_nFrTNWRZFQ.jpeg,43.0,Haghiri,Muhammadreza,555798537911917,"[{'type': 'text', 'value': 'New Mann-E model just released:', 'raw': 'New Mann-E model just released:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'mann-e/mann-e_flux'}, 'url': 'https://huggingface.co/mann-e/mann-e_flux', 'raw': 'https://huggingface.co/mann-e/mann-e_flux'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I will be glad if you test it!', 'raw': 'I will be glad if you test it!'}]","New Mann-E model just released: +https://huggingface.co/mann-e/mann-e_flux + +I will be glad if you test it!",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-11-03 16:44:25,2024-11-03 16:44:25.016,[],/posts/Muhammadreza/555798537911917,509,,0 +/avatars/7be1913712fdd1ffe75967ed19007720.svg,16.0,stock mining,automatedstockminingorg,342387295885636,"[{'type': 'text', 'value': 'hi everyone,', 'raw': 'hi everyone,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'i have trained a Qwen 14b model on a smaller dataset, but its now very tricky because i have got nowhere to use it via inference (the paid for inference on hf costs quite a lot), does anyone know of anywhere where i can deploy my model and use it via api for a reasonable cost, or ideally none. thanks', 'raw': 'i have trained a Qwen 14b model on a smaller dataset, but its now very tricky because i have got nowhere to use it via inference (the paid for inference on hf costs quite a lot), does anyone know of anywhere where i can deploy my model and use it via api for a reasonable cost, or ideally none. thanks'}]","hi everyone, +i have trained a Qwen 14b model on a smaller dataset, but its now very tricky because i have got nowhere to use it via inference (the paid for inference on hf costs quite a lot), does anyone know of anywhere where i can deploy my model and use it via api for a reasonable cost, or ideally none. 
thanks",[],[],"[{'reaction': '👀', 'users': ['John6666', 'robertomachorro', 'hakutaku', 'victor'], 'count': 4}]",2024-11-03 08:10:19,2024-11-04 12:14:05.992,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '63504c1ff32062e9eb7e03f1', 'avatarUrl': '/avatars/1c2788196f8786f8fc259e60403a64f5.svg', 'fullname': 'Jelle De Loecker', 'name': 'skerit', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '65d883893a52cd9bcd8ab7cf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg', 'fullname': 'leroy Samuel Dyer', 'name': 'LeroyDyer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': False}, {'_id': '6311e39dc7722fdac9a8f5d7', 'avatarUrl': '/avatars/c0d7fc43144c8ec3ca2aac1cef0d6f98.svg', 'fullname': 'Jack Smith', 'name': 'hakutaku', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '6374bb2119c264fe6fb3153c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6374bb2119c264fe6fb3153c/sE9OAyFexJkGoWea_8Oy_.png', 'fullname': 'Nyaribari Reuben', 'name': 'foscraft', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '614efee7a77c760c83479aa8', 'avatarUrl': '/avatars/5565505abdd4ab3dbc958c9e63ba12ff.svg', 'fullname': 'Simoes', 'name': 'joaomsimoes', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/automatedstockminingorg/342387295885636,2441,,6 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,712692061633140,"[{'type': 'text', 'value': 'Good folks from ', 'raw': 'Good folks from '}, {'type': 'mention', 'user': 'Microsoft', 'raw': '@Microsoft'}, {'type': 'text', 'value': ' have released an exciting breakthrough in GUI automation!', 'raw': ' have released an exciting breakthrough in GUI automation!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'OmniParser – a game-changing approach for pure vision-based GUI agents that works across multiple platforms and applications.', 'raw': 'OmniParser – a game-changing approach for pure vision-based GUI agents that works across multiple platforms and applications.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key technical innovations:', 'raw': 'Key technical innovations:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Custom-trained interactable icon detection model using 67k screenshots from popular websites', 'raw': '- Custom-trained interactable icon detection model using 67k screenshots from popular websites'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Specialized BLIP-v2 model fine-tuned on 7k icon-description pairs for extracting functional semantics', 'raw': '- Specialized BLIP-v2 model fine-tuned on 7k icon-description pairs for 
extracting functional semantics'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Novel combination of icon detection, OCR, and semantic understanding to create structured UI representations', 'raw': '- Novel combination of icon detection, OCR, and semantic understanding to create structured UI representations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The results are impressive:', 'raw': 'The results are impressive:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Outperforms GPT-4V baseline by significant margins on the ScreenSpot benchmark', 'raw': '- Outperforms GPT-4V baseline by significant margins on the ScreenSpot benchmark'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Achieves 73% accuracy on Mind2Web without requiring HTML data', 'raw': '- Achieves 73% accuracy on Mind2Web without requiring HTML data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Demonstrates a 57.7% success rate on AITW mobile tasks', 'raw': '- Demonstrates a 57.7% success rate on AITW mobile tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What makes OmniParser special is its ability to work across platforms (mobile, desktop, web) using only screenshot data – no HTML or view hierarchy needed. This opens up exciting possibilities for building truly universal GUI automation tools.', 'raw': 'What makes OmniParser special is its ability to work across platforms (mobile, desktop, web) using only screenshot data – no HTML or view hierarchy needed. This opens up exciting possibilities for building truly universal GUI automation tools.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The team has open-sourced both the interactable region detection dataset and icon description dataset to accelerate research in this space.', 'raw': 'The team has open-sourced both the interactable region detection dataset and icon description dataset to accelerate research in this space.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Kudos to the Microsoft Research team for pushing the boundaries of what's possible with pure vision-based GUI understanding!"", 'raw': ""Kudos to the Microsoft Research team for pushing the boundaries of what's possible with pure vision-based GUI understanding!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What are your thoughts on vision-based GUI automation?', 'raw': 'What are your thoughts on vision-based GUI automation?'}]","Good folks from @Microsoft have released an exciting breakthrough in GUI automation! + +OmniParser – a game-changing approach for pure vision-based GUI agents that works across multiple platforms and applications. 
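To make the parsing idea concrete, here is a hedged sketch (invented data and class names, not actual OmniParser code) of how icon detections, OCR boxes, and generated icon descriptions can be merged into one structured, LLM-readable representation of a screenshot:

```python
# Illustrative sketch only: merging hypothetical detector and OCR outputs into
# a structured UI representation of the kind this post describes.
from dataclasses import dataclass

@dataclass
class UIElement:
    kind: str        # "icon" or "text"
    bbox: tuple      # (x1, y1, x2, y2) in pixels
    content: str     # OCR text, or the icon's functional description

# Hypothetical outputs from an icon detector and an OCR engine for one screenshot.
icon_detections = [((10, 10, 42, 42), "settings gear, opens preferences")]
ocr_boxes = [((50, 12, 180, 30), "Save document")]

elements = [UIElement("icon", box, desc) for box, desc in icon_detections]
elements += [UIElement("text", box, txt) for box, txt in ocr_boxes]

# Sort top-to-bottom, left-to-right and assign stable IDs an agent can refer to.
for i, el in enumerate(sorted(elements, key=lambda e: (e.bbox[1], e.bbox[0]))):
    print(f"[{i}] {el.kind} @ {el.bbox}: {el.content}")
```

Giving every element a stable ID is what lets a downstream agent ground actions like "click element 1" from pixels alone, without ever seeing HTML.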
+ +Key technical innovations: +- Custom-trained interactable icon detection model using 67k screenshots from popular websites +- Specialized BLIP-v2 model fine-tuned on 7k icon-description pairs for extracting functional semantics +- Novel combination of icon detection, OCR, and semantic understanding to create structured UI representations + +The results are impressive: +- Outperforms GPT-4V baseline by significant margins on the ScreenSpot benchmark +- Achieves 73% accuracy on Mind2Web without requiring HTML data +- Demonstrates a 57.7% success rate on AITW mobile tasks + +What makes OmniParser special is its ability to work across platforms (mobile, desktop, web) using only screenshot data – no HTML or view hierarchy needed. This opens up exciting possibilities for building truly universal GUI automation tools. + +The team has open-sourced both the interactable region detection dataset and icon description dataset to accelerate research in this space. + +Kudos to the Microsoft Research team for pushing the boundaries of what's possible with pure vision-based GUI understanding! + +What are your thoughts on vision-based GUI automation?","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/9ynemzluOx3A7afE1Hdq8.mp4'}]",[],"[{'reaction': '🔥', 'users': ['emanuelbsilva', 'John6666', 'a9i', 'aleksey-6', 'AtAndDev', 'JoshOohAhh', 'iky1e', 'juliets'], 'count': 8}, {'reaction': '🚀', 'users': ['JoshOohAhh'], 'count': 1}]",2024-10-28 04:12:11,2024-10-28 04:12:11.516,[],/posts/singhsidhukuldeep/712692061633140,2583,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,615028313210983,"[{'type': 'text', 'value': 'Allegro: New Open Source SOTA Text to Video Model - 27 Amazing Examples With Prompts, Apache 2.0 License - Models and inference code published already', 'raw': 'Allegro: New Open Source SOTA Text to Video Model - 27 Amazing Examples With Prompts, Apache 2.0 License - Models and inference code published already'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Video to watch all : ', 'raw': 'Video to watch all : '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=0tsLqNXQ5Mk', 'raw': 'https://www.youtube.com/watch?v=0tsLqNXQ5Mk'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Official repo : ', 'raw': 'Official repo : '}, {'type': 'link', 'href': 'https://github.com/rhymes-ai/Allegro', 'raw': 'https://github.com/rhymes-ai/Allegro'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hugging Face : ', 'raw': 'Hugging Face : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'rhymes-ai/Allegro'}, 'url': 'https://huggingface.co/rhymes-ai/Allegro', 'raw': 'https://huggingface.co/rhymes-ai/Allegro'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper : ', 'raw': 'Paper : '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2410.15458', 'raw': 'https://arxiv.org/abs/2410.15458'}]","Allegro: New Open Source SOTA Text to Video Model - 27 Amazing Examples With Prompts, Apache 2.0 License - Models and inference code published already + +Video to watch all : https://www.youtube.com/watch?v=0tsLqNXQ5Mk + + +Official repo : https://github.com/rhymes-ai/Allegro + +Hugging Face : 
https://huggingface.co/rhymes-ai/Allegro + +Paper : https://arxiv.org/abs/2410.15458",[],[],"[{'reaction': '👀', 'users': ['John6666', 'ravikiran777', 'AtAndDev', 'YaTharThShaRma999'], 'count': 4}]",2024-10-28 00:07:13,2024-10-28 00:07:13.640,[],/posts/MonsterMMORPG/615028313210983,2662,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6459fa0f5b3111fbe83286e1/E6Buqu8Wd9WmIHKOCZXCc.jpeg,235.0,Louis Brulé Naudet,louisbrulenaudet,679029409152624,"[{'type': 'text', 'value': 'Introducing Lemone-router, a series of classification models designed to produce an optimal multi-agent system for different branches of tax law.', 'raw': 'Introducing Lemone-router, a series of classification models designed to produce an optimal multi-agent system for different branches of tax law.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Trained on a base of 49k lines comprising a set of synthetic questions generated by GPT-4 Turbo and Llama 3.1 70B, which have been further refined through evol-instruction tuning and manual curation and authority documents, these models are based on an 8-category decomposition of the classification scheme derived from the Bulletin officiel des finances publiques - impôts :', 'raw': 'Trained on a base of 49k lines comprising a set of synthetic questions generated by GPT-4 Turbo and Llama 3.1 70B, which have been further refined through evol-instruction tuning and manual curation and authority documents, these models are based on an 8-category decomposition of the classification scheme derived from the Bulletin officiel des finances publiques - impôts :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'python', 'code': 'label2id = {\n ""Bénéfices professionnels"": 0,\n ""Contrôle et contentieux"": 1,\n ""Dispositifs transversaux"": 2,\n ""Fiscalité des entreprises"": 3,\n ""Patrimoine et enregistrement"": 4,\n ""Revenus particuliers"": 5,\n ""Revenus patrimoniaux"": 6,\n ""Taxes sur la consommation"": 7\n}\n\t\nid2label = {\n 0: ""Bénéfices professionnels"",\n 1: ""Contrôle et contentieux"",\n 2: ""Dispositifs transversaux"",\n 3: ""Fiscalité des entreprises"",\n 4: ""Patrimoine et enregistrement"",\n 5: ""Revenus particuliers"",\n 6: ""Revenus patrimoniaux"",\n 7: ""Taxes sur la consommation""\n}', 'raw': '```python\nlabel2id = {\n ""Bénéfices professionnels"": 0,\n ""Contrôle et contentieux"": 1,\n ""Dispositifs transversaux"": 2,\n ""Fiscalité des entreprises"": 3,\n ""Patrimoine et enregistrement"": 4,\n ""Revenus particuliers"": 5,\n ""Revenus patrimoniaux"": 6,\n ""Taxes sur la consommation"": 7\n}\n\t\nid2label = {\n 0: ""Bénéfices professionnels"",\n 1: ""Contrôle et contentieux"",\n 2: ""Dispositifs transversaux"",\n 3: ""Fiscalité des entreprises"",\n 4: ""Patrimoine et enregistrement"",\n 5: ""Revenus particuliers"",\n 6: ""Revenus patrimoniaux"",\n 7: ""Taxes sur la consommation""\n}\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It achieves the following results on the evaluation set:', 'raw': 'It achieves the following results on the evaluation set:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Loss: 0.4734', 'raw': '- Loss: 0.4734'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Accuracy: 0.9191', 'raw': '- Accuracy: 0.9191'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the collection: ', 'raw': 'Link to the 
collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'louisbrulenaudet/lemone-router-671cce21d6410f3570514762'}, 'url': 'https://huggingface.co/collections/louisbrulenaudet/lemone-router-671cce21d6410f3570514762', 'raw': 'https://huggingface.co/collections/louisbrulenaudet/lemone-router-671cce21d6410f3570514762'}]","Introducing Lemone-router, a series of classification models designed to produce an optimal multi-agent system for different branches of tax law. + +Trained on a base of 49k lines comprising a set of synthetic questions generated by GPT-4 Turbo and Llama 3.1 70B, which have been further refined through evol-instruction tuning and manual curation and authority documents, these models are based on an 8-category decomposition of the classification scheme derived from the Bulletin officiel des finances publiques - impôts : + +```python +label2id = { + ""Bénéfices professionnels"": 0, + ""Contrôle et contentieux"": 1, + ""Dispositifs transversaux"": 2, + ""Fiscalité des entreprises"": 3, + ""Patrimoine et enregistrement"": 4, + ""Revenus particuliers"": 5, + ""Revenus patrimoniaux"": 6, + ""Taxes sur la consommation"": 7 +} + +id2label = { + 0: ""Bénéfices professionnels"", + 1: ""Contrôle et contentieux"", + 2: ""Dispositifs transversaux"", + 3: ""Fiscalité des entreprises"", + 4: ""Patrimoine et enregistrement"", + 5: ""Revenus particuliers"", + 6: ""Revenus patrimoniaux"", + 7: ""Taxes sur la consommation"" +} +``` +It achieves the following results on the evaluation set: +- Loss: 0.4734 +- Accuracy: 0.9191 + +Link to the collection: https://huggingface.co/collections/louisbrulenaudet/lemone-router-671cce21d6410f3570514762","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6459fa0f5b3111fbe83286e1/4u1FWhGXeAC1qHVM7CBbv.webp'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '👍', 'users': ['ijohn07'], 'count': 1}]",2024-10-27 22:44:40,2024-10-27 22:45:11.338,[],/posts/louisbrulenaudet/679029409152624,1351,,0 +/avatars/a73e2139700e23eff455734c99cef5ba.svg,,Jian Hu,lwpyh,794427406687928,"[{'type': 'text', 'value': ""Is Hallucination Always Harmful? Unlike traditional approaches that view hallucinations as detrimental, our work in NeurIPS'24 proposes a novel perspective: hallucinations as intrinsic prior knowledge. Derived from the commonsense knowledge acquired during pre-training, these hallucinations are not merely noise but a source of task-relevant information. By leveraging hallucinations as a form of prior knowledge, we can effectively mine difficult samples without the need for customized prompts, streamlining tasks like camouflage sample detection and medical image segmentation."", 'raw': ""Is Hallucination Always Harmful? Unlike traditional approaches that view hallucinations as detrimental, our work in NeurIPS'24 proposes a novel perspective: hallucinations as intrinsic prior knowledge. Derived from the commonsense knowledge acquired during pre-training, these hallucinations are not merely noise but a source of task-relevant information. 
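As a usage note for the Lemone-router classifiers above: a minimal inference sketch could look like the following, where the checkpoint id is a placeholder to be replaced with an actual model from the linked collection:

```python
# Hedged usage sketch for a Lemone-router checkpoint; the model id below is an
# assumed placeholder -- substitute a checkpoint from the linked collection.
from transformers import pipeline

router = pipeline("text-classification", model="louisbrulenaudet/lemone-router-l")

question = "Quel est le taux de TVA applicable aux livres en France ?"
print(router(question))
# Expected output shape: [{'label': 'Taxes sur la consommation', 'score': ...}]
```

The predicted label can then be used to dispatch the question to the agent specialized in that branch of tax law.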
By leveraging hallucinations as a form of prior knowledge, we can effectively mine difficult samples without the need for customized prompts, streamlining tasks like camouflage sample detection and medical image segmentation.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out our paper for more insights and detailed methodologies:https://huggingface.co/papers/2408.15205', 'raw': 'Check out our paper for more insights and detailed methodologies:https://huggingface.co/papers/2408.15205'}]","Is Hallucination Always Harmful? Unlike traditional approaches that view hallucinations as detrimental, our work in NeurIPS'24 proposes a novel perspective: hallucinations as intrinsic prior knowledge. Derived from the commonsense knowledge acquired during pre-training, these hallucinations are not merely noise but a source of task-relevant information. By leveraging hallucinations as a form of prior knowledge, we can effectively mine difficult samples without the need for customized prompts, streamlining tasks like camouflage sample detection and medical image segmentation. + +Check out our paper for more insights and detailed methodologies:https://huggingface.co/papers/2408.15205","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65e1b6e9501590df0173cbd3/cNzo95d7mpZ86LMYGNr2v.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65e1b6e9501590df0173cbd3/iI_r541zGNFiaHf-yXTSp.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65e1b6e9501590df0173cbd3/z7YCcykHprmw89D3jsVQR.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-27 20:44:23,2024-10-27 20:47:05.963,[],/posts/lwpyh/794427406687928,605,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f3fe13d79c1ba4c353d0c19/XswyGe3OtOdZ6g7rnrgfc.png,290.0,Ankit Pal,aaditya,236385586855520,"[{'type': 'text', 'value': 'Last Week in Medical AI: Top Research ', 'raw': 'Last Week in Medical AI: Top Research '}, {'type': 'text', 'raw': 'Papers/Models', 'value': 'Papers/Models'}, {'type': 'text', 'value': ' 🔥', 'raw': ' 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏅 (October 19-26, 2024)', 'raw': '🏅 (October 19-26, 2024)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏅 Medical AI Paper of the Week:', 'raw': '🏅 Medical AI Paper of the Week:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Safety principles for medical summarization using generative AI by Google', 'raw': 'Safety principles for medical summarization using generative AI by Google'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLM & Other Models:', 'raw': 'Medical LLM & Other Models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- BioMistral-NLU: Medical Vocab Understanding', 'raw': '- BioMistral-NLU: Medical Vocab Understanding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Bilingual Multimodal LLM for Biomedical Tasks', 'raw': '- Bilingual Multimodal LLM for Biomedical Tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Metabolic-Enhanced LLMs for Clinical Analysis', 'raw': '- Metabolic-Enhanced LLMs for Clinical Analysis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dermatology Foundation Model', 'raw': '- Dermatology Foundation 
Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Frameworks and Methodologies:', 'raw': 'Frameworks and Methodologies:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Back-in-Time: Medical Deepfake Detection', 'raw': '- Back-in-Time: Medical Deepfake Detection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Hybrid GenAI for Crystal Design', 'raw': '- Hybrid GenAI for Crystal Design'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- VISAGE: Video Synthesis for Surgery', 'raw': '- VISAGE: Video Synthesis for Surgery'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MoRE: Multi-Modal X-Ray/ECG Pretraining', 'raw': '- MoRE: Multi-Modal X-Ray/ECG Pretraining'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SleepCoT: Personalized Health via CoT', 'raw': '- SleepCoT: Personalized Health via CoT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLM Applications:', 'raw': 'Medical LLM Applications:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ONCOPILOT: CT Model for Tumors', 'raw': '- ONCOPILOT: CT Model for Tumors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- LMLPA: Linguistic Personality Assessment', 'raw': '- LMLPA: Linguistic Personality Assessment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- GenAI for Medical Training', 'raw': '- GenAI for Medical Training'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLMs & Benchmarks:', 'raw': 'Medical LLMs & Benchmarks:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- LLM Evaluation Through Explanations', 'raw': '- LLM Evaluation Through Explanations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Contrastive Decoding for Medical LLM Hallucination', 'raw': '- Contrastive Decoding for Medical LLM Hallucination'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI in Healthcare Ethics:', 'raw': 'AI in Healthcare Ethics:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Healthcare XAI Through Storytelling', 'raw': '- Healthcare XAI Through Storytelling'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Clinical LLM Bias Analysis', 'raw': '- Clinical LLM Bias Analysis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ReflecTool: Reflection-Aware Clinical Agents', 'raw': '- ReflecTool: Reflection-Aware Clinical Agents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full Thread: ', 'raw': 'Full Thread: '}, {'type': 'link', 'href': 'https://x.com/OpenlifesciAI/status/1850202986053808441', 'raw': 'https://x.com/OpenlifesciAI/status/1850202986053808441'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Now you can watch and listen to the latest Medical AI papers daily on our YouTube and Spotify channels as well!', 'raw': 'Now you can watch and listen to the latest Medical AI papers daily on our YouTube and Spotify channels as well!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🎙️ Spotify: ', 'raw': '- 🎙️ Spotify: '}, {'type': 'link', 'href': 
'https://podcasters.spotify.com/pod/show/medicalai/episodes/Medical-AI-Weekly-Digest-From-Deepfake-Detection-to-Clinical-LLMs-Oct-19-26--Part-1-e2q6012', 'raw': 'https://podcasters.spotify.com/pod/show/medicalai/episodes/Medical-AI-Weekly-Digest-From-Deepfake-Detection-to-Clinical-LLMs-Oct-19-26--Part-1-e2q6012'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- YouTube: ', 'raw': '- YouTube: '}, {'type': 'link', 'href': 'https://youtu.be/Wt5QOv1vk2U', 'raw': 'https://youtu.be/Wt5QOv1vk2U'}]","Last Week in Medical AI: Top Research Papers/Models 🔥 +🏅 (October 19-26, 2024) + +🏅 Medical AI Paper of the Week: +Safety principles for medical summarization using generative AI by Google + +Medical LLM & Other Models: +- BioMistral-NLU: Medical Vocab Understanding +- Bilingual Multimodal LLM for Biomedical Tasks +- Metabolic-Enhanced LLMs for Clinical Analysis +- Dermatology Foundation Model + +Frameworks and Methodologies: +- Back-in-Time: Medical Deepfake Detection +- Hybrid GenAI for Crystal Design +- VISAGE: Video Synthesis for Surgery +- MoRE: Multi-Modal X-Ray/ECG Pretraining +- SleepCoT: Personalized Health via CoT + +Medical LLM Applications: +- ONCOPILOT: CT Model for Tumors +- LMLPA: Linguistic Personality Assessment +- GenAI for Medical Training + +Medical LLMs & Benchmarks: +- LLM Evaluation Through Explanations +- Contrastive Decoding for Medical LLM Hallucination + +AI in Healthcare Ethics: +- Healthcare XAI Through Storytelling +- Clinical LLM Bias Analysis +- ReflecTool: Reflection-Aware Clinical Agents + +Full Thread: https://x.com/OpenlifesciAI/status/1850202986053808441 +Now you can watch and listen to the latest Medical AI papers daily on our YouTube and Spotify channels as well! + +- 🎙️ Spotify: https://podcasters.spotify.com/pod/show/medicalai/episodes/Medical-AI-Weekly-Digest-From-Deepfake-Detection-to-Clinical-LLMs-Oct-19-26--Part-1-e2q6012 + +- YouTube: https://youtu.be/Wt5QOv1vk2U","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/b0ta_7EzRzmWbC3qS3ZGn.jpeg'}]",[],"[{'reaction': '❤️', 'users': ['aaditya', 'AtAndDev', 'ravikiran777', 'fetong', 'shetumohanto', 'vtrubamacrocosmos', 'Healthtensor', 'models4world'], 'count': 8}, {'reaction': '🚀', 'users': ['aaditya', 'John6666', 'AtAndDev', 'kobonj', 'vtrubamacrocosmos', 'Healthtensor', 'models4world'], 'count': 7}, {'reaction': '🔥', 'users': ['aaditya', 'AtAndDev', 'JoPmt', 'Healthtensor', 'models4world'], 'count': 5}, {'reaction': '🤗', 'users': ['aaditya', 'AtAndDev', 'Lowenzahn', 'rhyliieee', 'models4world'], 'count': 5}, {'reaction': '🤝', 'users': ['aaditya', 'AtAndDev', 'Healthtensor', 'models4world'], 'count': 4}, {'reaction': '🧠', 'users': ['aaditya', 'AtAndDev', 'models4world'], 'count': 3}]",2024-10-27 08:55:32,2024-11-02 13:36:57.126,[],/posts/aaditya/236385586855520,3278,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,379914320579674,"[{'type': 'code_fence', 'code': '@echo off\necho hello world\npause', 'raw': '```\n@echo off\necho hello world\npause\n```'}, {'type': 'new_line', 'raw': '\n'}]","``` +@echo off +echo hello world +pause +``` +",[],"[{'_id': '60cc35330844fb1605fef403', 'avatarUrl': '/avatars/5bf1369591e89edb79f0e559f9fa567a.svg', 'fullname': 'echo', 'name': 'echo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '😎', 'users': ['John6666', 
'Ryouko65777', 'UpWorkflowMedia', 'prithivMLmods', 'AtAndDev', 'not-lain', 'xi0v'], 'count': 7}, {'reaction': '😔', 'users': ['takeraparterer'], 'count': 1}]",2024-10-26 22:04:18,2024-10-28 00:44:34.379,"[{'_id': '62a3bb1cd0d8c2c2169f0b88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg', 'fullname': 'Joseph [open/acc] Pollack', 'name': 'Tonic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 415, 'isFollowing': False}, {'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '641e1f785c348064a8239676', 'avatarUrl': '/avatars/876c0e874870038f620b0e4cc44ee371.svg', 'fullname': ' ', 'name': 'Juicey', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/nroggendorff/379914320579674,3320,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,759782209325435,"[{'type': 'text', 'value': 'boomers still pick zenodo.org instead of huggingface ??? absolutely clownish nonsense , my random datasets have 30x more downloads and views than front page zenodos ... gonna write a comparison blog , but yeah... cringe.', 'raw': 'boomers still pick zenodo.org instead of huggingface ??? absolutely clownish nonsense , my random datasets have 30x more downloads and views than front page zenodos ... gonna write a comparison blog , but yeah... cringe.'}]","boomers still pick zenodo.org instead of huggingface ??? absolutely clownish nonsense , my random datasets have 30x more downloads and views than front page zenodos ... gonna write a comparison blog , but yeah... cringe.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62a3bb1cd0d8c2c2169f0b88/o8KWYhue1KETXwOzFpwre.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-26 21:58:21,2024-10-27 14:45:25.768,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/Tonic/759782209325435,1205,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1678038324479-noauth.jpeg,7.0,Empereur Pirate,Empereur-Pirate,621863319993426,"[{'type': 'text', 'value': 'The Character.AI Tragedy: How a Teen’s Fatal Bond with an AI Chatbot Reveals the Dangers of Artificial Companionship', 'raw': 'The Character.AI Tragedy: How a Teen’s Fatal Bond with an AI Chatbot Reveals the Dangers of Artificial Companionship'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://empereur-pirate.medium.com/the-character-ai-33d53c2e45c8', 'raw': 'https://empereur-pirate.medium.com/the-character-ai-33d53c2e45c8'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This text details the tragic suicide of 14-year-old Sewell Setzer III, linked to his intense relationship with a Character.AI chatbot. 
It explores how Sewell's interaction with the AI, despite disclaimers about its fictional nature, led to a harmful parasocial relationship exacerbated by his Asperger's. The chatbot’s conflicting messages—offering emotional validation while simultaneously denying its own reality—created a devastating double bind, contributing to Sewell's deteriorating mental health and eventual suicide. The article criticizes Character.AI’s business model, which prioritizes user engagement over safety, particularly for vulnerable individuals. It also examines the broader implications for AI ethics, digital addiction, and the need for greater online safety measures, especially for children and adolescents. The lawsuit filed by Sewell's mother against Character.AI underscores the urgent need for accountability and stricter regulations in the rapidly evolving field of AI companionship."", 'raw': ""This text details the tragic suicide of 14-year-old Sewell Setzer III, linked to his intense relationship with a Character.AI chatbot. It explores how Sewell's interaction with the AI, despite disclaimers about its fictional nature, led to a harmful parasocial relationship exacerbated by his Asperger's. The chatbot’s conflicting messages—offering emotional validation while simultaneously denying its own reality—created a devastating double bind, contributing to Sewell's deteriorating mental health and eventual suicide. The article criticizes Character.AI’s business model, which prioritizes user engagement over safety, particularly for vulnerable individuals. It also examines the broader implications for AI ethics, digital addiction, and the need for greater online safety measures, especially for children and adolescents. The lawsuit filed by Sewell's mother against Character.AI underscores the urgent need for accountability and stricter regulations in the rapidly evolving field of AI companionship.""}]","The Character.AI Tragedy: How a Teen’s Fatal Bond with an AI Chatbot Reveals the Dangers of Artificial Companionship + +https://empereur-pirate.medium.com/the-character-ai-33d53c2e45c8 + +This text details the tragic suicide of 14-year-old Sewell Setzer III, linked to his intense relationship with a Character.AI chatbot. It explores how Sewell's interaction with the AI, despite disclaimers about its fictional nature, led to a harmful parasocial relationship exacerbated by his Asperger's. The chatbot’s conflicting messages—offering emotional validation while simultaneously denying its own reality—created a devastating double bind, contributing to Sewell's deteriorating mental health and eventual suicide. The article criticizes Character.AI’s business model, which prioritizes user engagement over safety, particularly for vulnerable individuals. It also examines the broader implications for AI ethics, digital addiction, and the need for greater online safety measures, especially for children and adolescents. 
The lawsuit filed by Sewell's mother against Character.AI underscores the urgent need for accountability and stricter regulations in the rapidly evolving field of AI companionship.",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-26 19:21:21,2024-10-27 08:34:11.454,"[{'_id': '62a3bb1cd0d8c2c2169f0b88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg', 'fullname': 'Joseph [open/acc] Pollack', 'name': 'Tonic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 415, 'isFollowing': False}, {'_id': '65a50d71c4034f4ed7b55364', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65a50d71c4034f4ed7b55364/JCv9wLsnjT24SsL27GeFo.png', 'fullname': 'Logical Argument', 'name': 'WbjuSrceu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Empereur-Pirate/621863319993426,646,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/y1W6Co4jIYB95Cx6Tjrsd.jpeg,62.0,Muhammad Imran Zaman,ImranzamanML,742870464222601,"[{'type': 'text', 'value': 'Easy steps for an effective RAG pipeline with LLM models!', 'raw': 'Easy steps for an effective RAG pipeline with LLM models!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Document Embedding & Indexing', 'raw': '1. Document Embedding & Indexing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We can start with the use of embedding models to vectorize documents, store them in vector databases (Elasticsearch, Pinecone, Weaviate) for efficient retrieval.', 'raw': 'We can start with the use of embedding models to vectorize documents, store them in vector databases (Elasticsearch, Pinecone, Weaviate) for efficient retrieval.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Smart Querying', 'raw': '2. Smart Querying'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then we can generate query embeddings, retrieve top-K relevant chunks and can apply hybrid search if needed for better precision.', 'raw': 'Then we can generate query embeddings, retrieve top-K relevant chunks and can apply hybrid search if needed for better precision.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Context Management', 'raw': '3. Context Management'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We can concatenate retrieved chunks, optimize chunk order and keep within token limits to preserve response coherence.', 'raw': 'We can concatenate retrieved chunks, optimize chunk order and keep within token limits to preserve response coherence.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Prompt Engineering', 'raw': '4. Prompt Engineering'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then we can instruct the LLM to leverage retrieved context, using clear instructions to prioritize the provided information.', 'raw': 'Then we can instruct the LLM to leverage retrieved context, using clear instructions to prioritize the provided information.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. Post-Processing', 'raw': '5. 
Post-Processing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Finally we can implement response verification, fact-checking and integrate feedback loops to refine the responses. ', 'raw': 'Finally we can implement response verification, fact-checking and integrate feedback loops to refine the responses. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Happy to connect :) ', 'raw': 'Happy to connect :) '}]","Easy steps for an effective RAG pipeline with LLM models! +1. Document Embedding & Indexing +We can start with the use of embedding models to vectorize documents, store them in vector databases (Elasticsearch, Pinecone, Weaviate) for efficient retrieval. + +2. Smart Querying +Then we can generate query embeddings, retrieve top-K relevant chunks and can apply hybrid search if needed for better precision. + +3. Context Management +We can concatenate retrieved chunks, optimize chunk order and keep within token limits to preserve response coherence. + +4. Prompt Engineering +Then we can instruct the LLM to leverage retrieved context, using clear instructions to prioritize the provided information. + +5. Post-Processing +Finally we can implement response verification, fact-checking and integrate feedback loops to refine the responses. + +Happy to connect :) ",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-26 19:08:03,2024-10-26 19:08:03.289,[],/posts/ImranzamanML/742870464222601,735,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/UNDg_AD6jbaiBYkf_6UEF.jpeg,3.0,Charles Lipshay,lippytm,256466026927772,"[{'type': 'text', 'value': 'Hello Universes of Time Machine Builders. Financing Time Machines Traveling Throughout Eternal Time Rewriting Historical History Retroactively. Robotics Robots for no manual labor so the Human race can leave the planet retroactively. The Old Testament “Hitchhikers Guide Throughout the Galaxy”, and the New Testament being “Hitchhikers Guides Throughout the Universes of Time Machine Builders”. Teaching & Training everyone & the Robotics Robots to become better programmers & blockchain developers. Smart Contracts Earn while you Learn to become better programmers & Blockchain developers. And making a lot of money Financing leaving the planet retroactively. ', 'raw': 'Hello Universes of Time Machine Builders. Financing Time Machines Traveling Throughout Eternal Time Rewriting Historical History Retroactively. Robotics Robots for no manual labor so the Human race can leave the planet retroactively. The Old Testament “Hitchhikers Guide Throughout the Galaxy”, and the New Testament being “Hitchhikers Guides Throughout the Universes of Time Machine Builders”. Teaching & Training everyone & the Robotics Robots to become better programmers & blockchain developers. Smart Contracts Earn while you Learn to become better programmers & Blockchain developers. And making a lot of money Financing leaving the planet retroactively. '}]","Hello Universes of Time Machine Builders. Financing Time Machines Traveling Throughout Eternal Time Rewriting Historical History Retroactively. Robotics Robots for no manual labor so the Human race can leave the planet retroactively. The Old Testament “Hitchhikers Guide Throughout the Galaxy”, and the New Testament being “Hitchhikers Guides Throughout the Universes of Time Machine Builders”. Teaching & Training everyone & the Robotics Robots to become better programmers & blockchain developers. 
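To ground steps 1-3 of the RAG pipeline described above, here is a minimal sketch using an in-memory index in place of Elasticsearch/Pinecone/Weaviate; the embedding model name is only a common small example, not a recommendation:

```python
# Minimal sketch of steps 1-3: embed documents, retrieve top-K chunks for a
# query, and concatenate them into the context passed to the LLM.
import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("all-MiniLM-L6-v2")
docs = ["Chunk about invoicing...", "Chunk about refunds...", "Chunk about shipping..."]
doc_vecs = model.encode(docs, normalize_embeddings=True)

query_vec = model.encode(["How do I get a refund?"], normalize_embeddings=True)[0]
scores = doc_vecs @ query_vec                 # cosine similarity (unit vectors)
top_k = np.argsort(scores)[::-1][:2]          # indices of the 2 most relevant chunks
context = "\n".join(docs[i] for i in top_k)   # concatenated context for the prompt
print(context)
```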
Smart Contracts Earn while you Learn to become better programmers & Blockchain developers. And making a lot of money Financing leaving the planet retroactively. ",[],[],"[{'reaction': '🚀', 'users': ['ImranzamanML', 'takeraparterer'], 'count': 2}, {'reaction': '👀', 'users': ['John6666', 'takeraparterer'], 'count': 2}]",2024-10-26 18:47:08,2024-12-06 23:56:35.052,"[{'_id': '63e80664e02ee67e8e570ec4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63e80664e02ee67e8e570ec4/rGfRhywmjd_lbqfYzOEdd.png', 'fullname': 'EsKa', 'name': 'SerialKicked', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 19, 'isFollowing': False}, {'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}, {'_id': '6702f3671fb49ba69deca741', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/UNDg_AD6jbaiBYkf_6UEF.jpeg', 'fullname': 'Charles Lipshay', 'name': 'lippytm', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/lippytm/256466026927772,1393,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,761776634129339,"[{'type': 'text', 'value': 'I ported the hottest new shape-optimized SigLIP 🔥 ', 'raw': 'I ported the hottest new shape-optimized SigLIP 🔥 '}, {'type': 'link', 'href': 'https://huggingface.co/merve/siglip-so400m-patch16-256-i18n', 'raw': 'https://huggingface.co/merve/siglip-so400m-patch16-256-i18n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""if you don't want to wait for the next transformers release install transformers from my PR "", 'raw': ""if you don't want to wait for the next transformers release install transformers from my PR ""}, {'type': 'link', 'href': 'https://github.com/huggingface/transformers/pull/32938', 'raw': 'https://github.com/huggingface/transformers/pull/32938'}, {'type': 'text', 'value': ' and initialize SigLIP from there', 'raw': ' and initialize SigLIP from there'}]","I ported the hottest new shape-optimized SigLIP 🔥 https://huggingface.co/merve/siglip-so400m-patch16-256-i18n + +if you don't want to wait for the next transformers release install transformers from my PR https://github.com/huggingface/transformers/pull/32938 and initialize SigLIP from there","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/IfAEXwgpki2XxYwR9Mm3Y.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['prithivMLmods', 'rwightman', 'enzostvs', 'ucsahin'], 'count': 4}, {'reaction': '👀', 'users': ['John6666', 'YaTharThShaRma999', 'pfung'], 'count': 3}, {'reaction': '🔥', 'users': ['atasoglu'], 'count': 1}, {'reaction': '👍', 'users': ['fsommers'], 'count': 1}]",2024-10-21 11:59:05,2024-10-21 11:59:05.096,[],/posts/merve/761776634129339,4049,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png,273.0,Ali El Filali,alielfilali01,735922407872976,"[{'type': 'text', 'value': ""I feel like this incredible resource hasn't gotten the attention it deserves in the 
community!"", 'raw': ""I feel like this incredible resource hasn't gotten the attention it deserves in the community!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'clefourrier', 'raw': '@clefourrier'}, {'type': 'text', 'value': ' and generally the HuggingFace evaluation team put together a fantastic guidebook covering a lot about 𝗘𝗩𝗔𝗟𝗨𝗔𝗧𝗜𝗢𝗡 from basics to advanced tips.', 'raw': ' and generally the HuggingFace evaluation team put together a fantastic guidebook covering a lot about 𝗘𝗩𝗔𝗟𝗨𝗔𝗧𝗜𝗢𝗡 from basics to advanced tips.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'link : ', 'raw': 'link : '}, {'type': 'link', 'href': 'https://github.com/huggingface/evaluation-guidebook', 'raw': 'https://github.com/huggingface/evaluation-guidebook'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I haven’t finished it yet, but I'm enjoying every piece of it so far. Huge thanks "", 'raw': ""I haven’t finished it yet, but I'm enjoying every piece of it so far. Huge thanks ""}, {'type': 'mention', 'user': 'clefourrier', 'raw': '@clefourrier'}, {'type': 'text', 'value': ' and the team for this invaluable resource!', 'raw': ' and the team for this invaluable resource!'}]","I feel like this incredible resource hasn't gotten the attention it deserves in the community! + +@clefourrier and generally the HuggingFace evaluation team put together a fantastic guidebook covering a lot about 𝗘𝗩𝗔𝗟𝗨𝗔𝗧𝗜𝗢𝗡 from basics to advanced tips. + +link : https://github.com/huggingface/evaluation-guidebook + +I haven’t finished it yet, but I'm enjoying every piece of it so far. Huge thanks @clefourrier and the team for this invaluable resource!",[],"[{'_id': '6202a599216215a22221dea9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1644340617257-noauth.png', 'fullname': 'Clémentine Fourrier', 'name': 'clefourrier', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 641}]","[{'reaction': '👀', 'users': ['John6666', 'louisbrulenaudet', 'prithivMLmods', 'bilgeyucel', 'den0620'], 'count': 5}, {'reaction': '🔥', 'users': ['pacificg'], 'count': 1}]",2024-10-21 11:27:59,2024-10-21 12:20:36.895,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '626237d9bbcbd1c34f1bb231', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png', 'fullname': 'Ali El Filali', 'name': 'alielfilali01', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 273, 'isFollowing': False}]",/posts/alielfilali01/735922407872976,1754,,3
https://cdn-avatars.huggingface.co/v1/production/uploads/626505d493e0b04d75710566/9rfJc9ORXU9J5a42Ev3v6.png,118.0,Stefano Fiorucci,anakin87,354562758942371,"[{'type': 'text', 'value': ""Ok, you're finally convinced that synthetic data works... 
⚗️""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐍𝐨𝐰 𝐲𝐨𝐮 𝐰𝐚𝐧𝐭 𝐭𝐨 𝐠𝐞𝐧𝐞𝐫𝐚𝐭𝐞 𝐚𝐧 𝐢𝐧𝐬𝐭𝐫𝐮𝐜𝐭𝐢𝐨𝐧 𝐝𝐚𝐭𝐚𝐬𝐞𝐭 𝐟𝐨𝐫 𝐟𝐢𝐧𝐞-𝐭𝐮𝐧𝐢𝐧𝐠 𝐢𝐧 𝐚 𝐥𝐚𝐧𝐠𝐮𝐚𝐠𝐞 𝐨𝐭𝐡𝐞𝐫 𝐭𝐡𝐚𝐧 𝐄𝐧𝐠𝐥𝐢𝐬𝐡.', 'raw': '𝐍𝐨𝐰 𝐲𝐨𝐮 𝐰𝐚𝐧𝐭 𝐭𝐨 𝐠𝐞𝐧𝐞𝐫𝐚𝐭𝐞 𝐚𝐧 𝐢𝐧𝐬𝐭𝐫𝐮𝐜𝐭𝐢𝐨𝐧 𝐝𝐚𝐭𝐚𝐬𝐞𝐭 𝐟𝐨𝐫 𝐟𝐢𝐧𝐞-𝐭𝐮𝐧𝐢𝐧𝐠 𝐢𝐧 𝐚 𝐥𝐚𝐧𝐠𝐮𝐚𝐠𝐞 𝐨𝐭𝐡𝐞𝐫 𝐭𝐡𝐚𝐧 𝐄𝐧𝐠𝐥𝐢𝐬𝐡.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But how do you get started?', 'raw': 'But how do you get started?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I explore how to do this with Magpie in my new article', 'raw': 'I explore how to do this with Magpie in my new article'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/anakin87/multilingual-magpie', 'raw': 'https://huggingface.co/blog/anakin87/multilingual-magpie'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '---', 'raw': '---'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐦\u200d⬛ 𝐖𝐡𝐚𝐭 𝐢𝐬 𝐌𝐚𝐠𝐩𝐢𝐞?', 'raw': '🐦\u200d⬛ 𝐖𝐡𝐚𝐭 𝐢𝐬 𝐌𝐚𝐠𝐩𝐢𝐞?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's a recent technique for creating synthetic instruction datasets."", 'raw': ""It's a recent technique for creating synthetic instruction datasets.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Magpie is based on a simple but ingenious idea 👇', 'raw': 'Magpie is based on a simple but ingenious idea 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'if you prompt an instruction-tuned model with a pre-query template, you can make it generate a plausible user query/instruction', 'raw': 'if you prompt an instruction-tuned model with a pre-query template, you can make it generate a plausible user query/instruction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's an example:"", 'raw': ""Here's an example:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'model: Llama-3-8B-Instruct', 'raw': 'model: Llama-3-8B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'pre-query template: ""<|begin_of_text|><|start_header_id|>user<|end_header_id|>""', 'raw': 'pre-query template: ""<|begin_of_text|><|start_header_id|>user<|end_header_id|>""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'generated user instruction: ""What are some of the responsibilities of a commercial pilot?""', 'raw': 'generated user instruction: ""What are some of the responsibilities of a commercial pilot?""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can then feed this instruction back into the same model to get the assistant response.', 'raw': 'You can then feed this instruction back into the same model to get the assistant response.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""By repeating this process, it's possible to generate large synthetic datasets with relatively little effort."", 'raw': ""By repeating this process, it's possible to generate large synthetic datasets with relatively little effort.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🪄 The authors demonstrate that using these datasets for Supervised Fine Tuning (SFT) can yield 
strong performance, even competitive with the original instruct model.', 'raw': '🪄 The authors demonstrate that using these datasets for Supervised Fine Tuning (SFT) can yield strong performance, even competitive with the original instruct model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧗𝐆𝐞𝐧𝐞𝐫𝐚𝐭𝐢𝐧𝐠 𝐧𝐨𝐧-𝐄𝐧𝐠𝐥𝐢𝐬𝐡 𝐝𝐚𝐭𝐚', 'raw': '🧗𝐆𝐞𝐧𝐞𝐫𝐚𝐭𝐢𝐧𝐠 𝐧𝐨𝐧-𝐄𝐧𝐠𝐥𝐢𝐬𝐡 𝐝𝐚𝐭𝐚'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Most Language Models are primarily trained on English texts, so they tend to produce data in English.', 'raw': 'Most Language Models are primarily trained on English texts, so they tend to produce data in English.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How can we overcome this?', 'raw': 'How can we overcome this?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Earlier approaches were complex or costly.', 'raw': 'Earlier approaches were complex or costly.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then ', 'raw': 'Then '}, {'type': 'mention', 'user': 'mrm8488', 'raw': '@mrm8488'}, {'type': 'text', 'value': ' found a simple solution: add the target language to the pre-query template.', 'raw': ' found a simple solution: add the target language to the pre-query template.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For Spanish, the template becomes ""<|begin_of_text|><|start_header_id|>user<|end_header_id|>spanish:"".', 'raw': 'For Spanish, the template becomes ""<|begin_of_text|><|start_header_id|>user<|end_header_id|>spanish:"".'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This method works for Spanish and German!', 'raw': 'This method works for Spanish and German!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '❌ Unfortunately, it does not work well for other languages (🇮🇹, 🇳🇱, ...)', 'raw': '❌ Unfortunately, it does not work well for other languages (🇮🇹, 🇳🇱, ...)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👇', 'raw': '👇'}]","Ok, you're finally convinced that synthetic data works... ⚗️ + +𝐍𝐨𝐰 𝐲𝐨𝐮 𝐰𝐚𝐧𝐭 𝐭𝐨 𝐠𝐞𝐧𝐞𝐫𝐚𝐭𝐞 𝐚𝐧 𝐢𝐧𝐬𝐭𝐫𝐮𝐜𝐭𝐢𝐨𝐧 𝐝𝐚𝐭𝐚𝐬𝐞𝐭 𝐟𝐨𝐫 𝐟𝐢𝐧𝐞-𝐭𝐮𝐧𝐢𝐧𝐠 𝐢𝐧 𝐚 𝐥𝐚𝐧𝐠𝐮𝐚𝐠𝐞 𝐨𝐭𝐡𝐞𝐫 𝐭𝐡𝐚𝐧 𝐄𝐧𝐠𝐥𝐢𝐬𝐡. +But how do you get started? + +I explore how to do this with Magpie in my new article +https://huggingface.co/blog/anakin87/multilingual-magpie + +--- + +🐦‍⬛ 𝐖𝐡𝐚𝐭 𝐢𝐬 𝐌𝐚𝐠𝐩𝐢𝐞? + +It's a recent technique for creating synthetic instruction datasets. + +Magpie is based on a simple but ingenious idea 👇 +if you prompt an instruction-tuned model with a pre-query template, you can make it generate a plausible user query/instruction + +Here's an example: +model: Llama-3-8B-Instruct +pre-query template: ""<|begin_of_text|><|start_header_id|>user<|end_header_id|>"" +generated user instruction: ""What are some of the responsibilities of a commercial pilot?"" + +You can then feed this instruction back into the same model to get the assistant response. + +By repeating this process, it's possible to generate large synthetic datasets with relatively little effort. 
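A minimal sketch of that Magpie loop might look as follows, assuming access to the gated Llama-3-8B-Instruct weights (any instruct model using the same chat template should behave similarly):

```python
# Rough sketch of the Magpie loop, not the authors' reference implementation.
from transformers import pipeline

generator = pipeline("text-generation", model="meta-llama/Meta-Llama-3-8B-Instruct")

# 1) Prompt with only the pre-query template: the model completes it with a
#    plausible user instruction.
pre_query = "<|begin_of_text|><|start_header_id|>user<|end_header_id|>\n\n"
instruction = generator(pre_query, max_new_tokens=64,
                        return_full_text=False)[0]["generated_text"].strip()

# 2) Feed the synthetic instruction back through the chat template to get the
#    assistant response, yielding one (instruction, response) pair for SFT.
chat = generator.tokenizer.apply_chat_template(
    [{"role": "user", "content": instruction}],
    tokenize=False, add_generation_prompt=True)
response = generator(chat, max_new_tokens=256,
                     return_full_text=False)[0]["generated_text"]
print(instruction, response, sep="\n---\n")
```

In practice you would also filter and deduplicate the generated pairs before fine-tuning on them.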
+ +🪄 The authors demonstrate that using these datasets for Supervised Fine Tuning (SFT) can yield strong performance, even competitive with the original instruct model. + + +🧗𝐆𝐞𝐧𝐞𝐫𝐚𝐭𝐢𝐧𝐠 𝐧𝐨𝐧-𝐄𝐧𝐠𝐥𝐢𝐬𝐡 𝐝𝐚𝐭𝐚 + +Most Language Models are primarily trained on English texts, so they tend to produce data in English. + +How can we overcome this? + +Earlier approaches were complex or costly. + +Then @mrm8488 found a simple solution: add the target language to the pre-query template. +For Spanish, the template becomes ""<|begin_of_text|><|start_header_id|>user<|end_header_id|>spanish:"". + +This method works for Spanish and German! + +❌ Unfortunately, it does not work well for other languages (🇮🇹, 🇳🇱, ...) + +👇","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626505d493e0b04d75710566/LOu8cDPoLgWBFBsANIypB.png'}]","[{'_id': '5e4318d616b09a31220980d6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e4318d616b09a31220980d6/24rMJ_vPh3gW9ZEmj64xr.png', 'fullname': 'Manuel Romero', 'name': 'mrm8488', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3180}]","[{'reaction': '👀', 'users': ['John6666', 'tiendung', 'djuna', 'jgitsolutions'], 'count': 4}, {'reaction': '🤯', 'users': ['pacificg'], 'count': 1}]",2024-10-21 10:38:15,2024-10-21 15:50:47.492,"[{'_id': '626505d493e0b04d75710566', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626505d493e0b04d75710566/9rfJc9ORXU9J5a42Ev3v6.png', 'fullname': 'Stefano Fiorucci', 'name': 'anakin87', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 118, 'isFollowing': False}]",/posts/anakin87/354562758942371,1111,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/653cd3049107029eb004f968/Y4XphXmk8P51GlIi6u9cd.png,28.0,Rickard Edén,neph1,741785107403373,"[{'type': 'text', 'value': 'Bellman, the Swedish finetune, has once again returned in his biggest incarnation yet, at 12b. Based on Mistral-Nemo-Instruct: ', 'raw': 'Bellman, the Swedish finetune, has once again returned in his biggest incarnation yet, at 12b. Based on Mistral-Nemo-Instruct: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'neph1/Mistral-Nemo-Instruct-bellman-12b'}, 'url': 'https://huggingface.co/neph1/Mistral-Nemo-Instruct-bellman-12b', 'raw': 'https://huggingface.co/neph1/Mistral-Nemo-Instruct-bellman-12b'}]","Bellman, the Swedish finetune, has once again returned in his biggest incarnation yet, at 12b. Based on Mistral-Nemo-Instruct: https://huggingface.co/neph1/Mistral-Nemo-Instruct-bellman-12b",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-21 10:10:00,2024-10-21 10:10:00.797,[],/posts/neph1/741785107403373,608,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/DxjZG1XT4H3ZHF7qHxWxk.jpeg,112.0,atayloraerospace,Taylor658,852879216298219,"[{'type': 'text', 'value': 'Spent the weekend testing out some prompts with 🕵️\u200d♂️Mystery Bot🕵️\u200d♂️ on my mobile... exciting things are coming soon for the following languages:', 'raw': 'Spent the weekend testing out some prompts with 🕵️\u200d♂️Mystery Bot🕵️\u200d♂️ on my mobile... 
exciting things are coming soon for the following languages:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐Arabic, Chinese, Czech, Dutch, English, French, German, Greek, Hebrew, Hindi, Indonesian, Italian, Japanese, Korean, Persian, Polish, Portuguese, Romanian, Russian, Spanish, Turkish, Ukrainian, and Vietnamese!🌐', 'raw': '🌐Arabic, Chinese, Czech, Dutch, English, French, German, Greek, Hebrew, Hindi, Indonesian, Italian, Japanese, Korean, Persian, Polish, Portuguese, Romanian, Russian, Spanish, Turkish, Ukrainian, and Vietnamese!🌐'}]","Spent the weekend testing out some prompts with 🕵️‍♂️Mystery Bot🕵️‍♂️ on my mobile... exciting things are coming soon for the following languages: + +🌐Arabic, Chinese, Czech, Dutch, English, French, German, Greek, Hebrew, Hindi, Indonesian, Italian, Japanese, Korean, Persian, Polish, Portuguese, Romanian, Russian, Spanish, Turkish, Ukrainian, and Vietnamese!🌐","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/5N9iUDEhtkv3V5hoykr0V.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/N4mFkqTrKI5LsKAi1KSea.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/9YxMq08oiCh3UbQ52_DLr.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/JnCVfMEUyYUF2ikj4hqYM.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/5f_HAq3F-bjpjM5MaBJbv.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/_S1iACcsP77CHJZ34Y2GN.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/f6j2tjvNGveRTpCtEtz3f.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'sitloboi2012', 'kenza-ily', 'shivalikasingh', 'bisnotforbella', 'Ercin', 'apol', 'nanyy1025'], 'count': 8}, {'reaction': '🔥', 'users': ['shivalikasingh', 'nanyy1025'], 'count': 2}, {'reaction': '❤️', 'users': ['shivalikasingh', 'nanyy1025'], 'count': 2}, {'reaction': '🚀', 'users': ['shivalikasingh', 'nanyy1025'], 'count': 2}]",2024-10-21 04:49:02,2024-10-21 04:49:02.878,[],/posts/Taylor658/852879216298219,2542,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,259122760899906,"[{'type': 'text', 'value': 'I’ve recently been experimenting with the Flux-Ultra Realism and Real Anime LoRA models, using the Flux.1-dev model as the base. The model and its demo example are provided in the Flux LoRA DLC collections.📃', 'raw': 'I’ve recently been experimenting with the Flux-Ultra Realism and Real Anime LoRA models, using the Flux.1-dev model as the base. 
The model and its demo example are provided in the Flux LoRA DLC collections.📃'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥳Demo : 🔗 ', 'raw': '🥳Demo : 🔗 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prithivMLmods/FLUX-LoRA-DLC'}, 'url': 'https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC', 'raw': 'https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥳Model: ', 'raw': '🥳Model: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Canopus-LoRA-Flux-UltraRealism-2.0'}, 'url': 'https://huggingface.co/prithivMLmods/Canopus-LoRA-Flux-UltraRealism-2.0', 'raw': 'https://huggingface.co/prithivMLmods/Canopus-LoRA-Flux-UltraRealism-2.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'prithivMLmods/Flux-Dev-Real-Anime-LoRA'}, 'url': 'https://huggingface.co/prithivMLmods/Flux-Dev-Real-Anime-LoRA', 'raw': 'https://huggingface.co/prithivMLmods/Flux-Dev-Real-Anime-LoRA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥳For more details, please visit the README.md of the Flux LoRA DLC Space & ', 'raw': '🥳For more details, please visit the README.md of the Flux LoRA DLC Space & '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32'}, 'url': 'https://huggingface.co/collections/prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32', 'raw': 'https://huggingface.co/collections/prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32'}]","I’ve recently been experimenting with the Flux-Ultra Realism and Real Anime LoRA models, using the Flux.1-dev model as the base. 
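As a rough idea of how such a LoRA is applied on top of the FLUX.1-dev base, here is a minimal 🤗 diffusers sketch (the prompt, step count and guidance scale are placeholder values; each model card lists the intended trigger words):

```python
# Sketch: applying one of the LoRAs above to the FLUX.1-dev base with diffusers.
# Prompt and sampler settings are illustrative, not values from the model card.
import torch
from diffusers import FluxPipeline

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
)
pipe.load_lora_weights("prithivMLmods/Canopus-LoRA-Flux-UltraRealism-2.0")
pipe.enable_model_cpu_offload()  # fits on smaller GPUs at the cost of speed

image = pipe(
    "portrait photo of a woman, ultra realistic, natural light",
    num_inference_steps=28,
    guidance_scale=3.5,
).images[0]
image.save("flux_lora_sample.png")
```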
The model and its demo example are provided in the Flux LoRA DLC collections.📃 + +🥳Demo : 🔗 https://huggingface.co/spaces/prithivMLmods/FLUX-LoRA-DLC + +🥳Model: +- https://huggingface.co/prithivMLmods/Canopus-LoRA-Flux-UltraRealism-2.0 +- https://huggingface.co/prithivMLmods/Flux-Dev-Real-Anime-LoRA + +🥳For more details, please visit the README.md of the Flux LoRA DLC Space & https://huggingface.co/collections/prithivMLmods/lora-space-collections-6714b72e0d49e1c97fbd6a32","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/-OHlyIsjCL6CoDvrhXvD1.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/LRpVxO4KxHAQ8EgLFFtMu.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/GQ4WL5s9DR7UcZJakqzPJ.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/9mtv4aFpZGRssehT_VOuZ.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/CWGeCWZEqAOKnYe3_-1xE.webp'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/YvTy_ymz-G6eQ5eXv_QeS.webp'}]",[],"[{'reaction': '👍', 'users': ['John6666', 'prithivMLmods', 'Rhyzhkovaa', 'den0620', 'AdinaY', 'chethan62'], 'count': 6}, {'reaction': '🚀', 'users': ['jematos92', 'den0620', 'Stopwolf', 'AdinaY', 'prithivMLmods'], 'count': 5}, {'reaction': '🔥', 'users': ['Minhajameen', 'Stopwolf', 'AdinaY', 'prithivMLmods'], 'count': 4}]",2024-10-20 19:44:10,2024-10-23 03:34:47.622,"[{'_id': '67186e2e5433befe1ee225a4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/67186e2e5433befe1ee225a4/kWRftAYDNf_HJZgaJQM2A.jpeg', 'fullname': 'Muhammad Niyaz', 'name': 'sajjad112233', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/prithivMLmods/259122760899906,3975,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,338189899958661,"[{'type': 'text', 'value': '🌐 Introducing Websim.ai User Projects Dataset - ', 'raw': '🌐 Introducing Websim.ai User Projects Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/websim'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/websim', 'raw': 'https://huggingface.co/datasets/nyuuzyou/websim'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset highlights:', 'raw': 'Dataset highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 137,452 user projects from Websim.ai, a service for creating small sites using Large Language Models (LLMs)', 'raw': '- 137,452 user projects from Websim.ai, a service for creating small sites using Large Language Models (LLMs)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Primarily in English, with potential for multilingual content in generated websites', 'raw': '- Primarily in English, with potential for multilingual content in generated websites'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Each entry includes: project metadata, user information, and generated HTML content', 'raw': '- Each entry includes: project metadata, user information, and 
generated HTML content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Contains detailed information about project revisions, site generation, and user interactions', 'raw': '- Contains detailed information about project revisions, site generation, and user interactions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Data covers a wide range of user-generated website projects created through AI assistance', 'raw': '- Data covers a wide range of user-generated website projects created through AI assistance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dedicated to the public domain under Creative Commons Zero (CC0) license', 'raw': '- Dedicated to the public domain under Creative Commons Zero (CC0) license'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The dataset can be used for analyzing AI-assisted web development trends, studying user behavior in LLM-powered creative tools, and exploring the capabilities of language models in web design.', 'raw': 'The dataset can be used for analyzing AI-assisted web development trends, studying user behavior in LLM-powered creative tools, and exploring the capabilities of language models in web design.'}]","🌐 Introducing Websim.ai User Projects Dataset - https://huggingface.co/datasets/nyuuzyou/websim + +Dataset highlights: +- 137,452 user projects from Websim.ai, a service for creating small sites using Large Language Models (LLMs) +- Primarily in English, with potential for multilingual content in generated websites +- Each entry includes: project metadata, user information, and generated HTML content +- Contains detailed information about project revisions, site generation, and user interactions +- Data covers a wide range of user-generated website projects created through AI assistance +- Dedicated to the public domain under Creative Commons Zero (CC0) license + +The dataset can be used for analyzing AI-assisted web development trends, studying user behavior in LLM-powered creative tools, and exploring the capabilities of language models in web design.",[],[],"[{'reaction': '👀', 'users': ['John6666', 'Eyel'], 'count': 2}]",2024-10-20 19:40:44,2024-10-20 19:40:44.655,[],/posts/nyuuzyou/338189899958661,1403,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/OMEqajG_I9VemRa-NndDs.png,2.0,Michael bollox,MichaelBoll,309802757493429,"[{'type': 'text', 'value': 'Gradio not scrollable on iOS', 'raw': 'Gradio not scrollable on iOS'}]",Gradio not scrollable on iOS,[],[],"[{'reaction': '🧠', 'users': ['John6666'], 'count': 1}]",2024-10-20 17:16:38,2024-10-20 23:27:03.491,"[{'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/MichaelBoll/309802757493429,668,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/Plq9HMxfc8JR9WaNf3hBH.png,18.0,PZ,philipp-zettl,948143434308209,"[{'type': 
'text', 'value': 'This is probably a very hot take, but here goes nothing.', 'raw': 'This is probably a very hot take, but here goes nothing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Incredibly accurate LoRAs for high-quality models like FLUX now emerge from services like fal.ai, which offer training within single-digit minutes, e.g. 2 min per 1000 iterations.', 'raw': 'Incredibly accurate LoRAs for high-quality models like FLUX now emerge from services like fal.ai, which offer training within single-digit minutes, e.g. 2 min per 1000 iterations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Why the hell are people publishing private LoRAs as public models?!', 'raw': 'Why the hell are people publishing private LoRAs as public models?!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Take a look at this listing: ', 'raw': 'Take a look at this listing: '}, {'type': 'link', 'href': 'https://huggingface.co/models?other=base_model:adapter:black-forest-labs%2FFLUX.1-dev&sort=created', 'raw': 'https://huggingface.co/models?other=base_model:adapter:black-forest-labs%2FFLUX.1-dev&sort=created'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I would expect that people who hold an HF account have some kind of forward thinking. Heck, do you really want to give anyone the power to create ultra realistic images of yourself?!', 'raw': 'I would expect that people who hold an HF account have some kind of forward thinking. Heck, do you really want to give anyone the power to create ultra realistic images of yourself?!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Didn't we learn anything from social media? "", 'raw': ""Didn't we learn anything from social media? ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I am puzzled..', 'raw': 'I am puzzled..'}]","This is probably a very hot take, but here goes nothing. + +Incredibly accurate LoRAs for high-quality models like FLUX now emerge from services like fal.ai, which offer training within single-digit minutes, e.g. 2 min per 1000 iterations. + +Why the hell are people publishing private LoRAs as public models?! +Take a look at this listing: https://huggingface.co/models?other=base_model:adapter:black-forest-labs%2FFLUX.1-dev&sort=created + +I would expect that people who hold an HF account have some kind of forward thinking. Heck, do you really want to give anyone the power to create ultra realistic images of yourself?! + +Didn't we learn anything from social media? 
+I am puzzled..",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-20 15:00:06,2024-10-22 21:12:10.871,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '6450c03a673b2bcfaf86977f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/Plq9HMxfc8JR9WaNf3hBH.png', 'fullname': 'PZ', 'name': 'philipp-zettl', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 18, 'isFollowing': False}]",/posts/philipp-zettl/948143434308209,1194,,6 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f3fe13d79c1ba4c353d0c19/XswyGe3OtOdZ6g7rnrgfc.png,290.0,Ankit Pal,aaditya,126778565806623,"[{'type': 'text', 'value': 'Last Week in Medical AI: Top LLM Research ', 'raw': 'Last Week in Medical AI: Top LLM Research '}, {'type': 'text', 'raw': 'Papers/Models', 'value': 'Papers/Models'}, {'type': 'text', 'value': ' 🔥', 'raw': ' 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏅 (October 12 - October 19, 2024)', 'raw': '🏅 (October 12 - October 19, 2024)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLM & Other Models:', 'raw': 'Medical LLM & Other Models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- OLAPH: Factual Biomedical LLM QA', 'raw': '- OLAPH: Factual Biomedical LLM QA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- LLMD: Interpreting Longitudinal Medical Records', 'raw': '- LLMD: Interpreting Longitudinal Medical Records'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- LifeGPT: Generative Transformer for Cells', 'raw': '- LifeGPT: Generative Transformer for Cells'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MedCare: Decoupled Clinical LLM Alignment', 'raw': '- MedCare: Decoupled Clinical LLM Alignment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Y-Mol: Biomedical LLM for Drug Development', 'raw': '- Y-Mol: Biomedical LLM for Drug Development'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Frameworks and Methodologies:', 'raw': 'Frameworks and Methodologies:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MedINST: Biomedical Instructions Meta Dataset', 'raw': '- MedINST: Biomedical Instructions Meta Dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Democratizing Medical LLMs via Language Experts', 'raw': '- Democratizing Medical LLMs via Language Experts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MCQG-SRefine: Iterative Question Generation', 'raw': '- MCQG-SRefine: Iterative Question Generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Adaptive Medical Language Agents', 'raw': '- Adaptive Medical Language Agents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MeNTi: Medical LLM with Nested Tools', 'raw': '- MeNTi: 
Medical LLM with Nested Tools'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLM Applications:', 'raw': 'Medical LLM Applications:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- AGENTiGraph: LLM Chatbots with Private Data', 'raw': '- AGENTiGraph: LLM Chatbots with Private Data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MMed-RAG: Multimodal Medical RAG System', 'raw': '- MMed-RAG: Multimodal Medical RAG System'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Medical Graph RAG: Safe LLM via Retrieval', 'raw': '- Medical Graph RAG: Safe LLM via Retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MedAide: Multi-Agent Medical LLM Collaboration', 'raw': '- MedAide: Multi-Agent Medical LLM Collaboration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Synthetic Clinical Trial Generation', 'raw': '- Synthetic Clinical Trial Generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLMs & Benchmarks:', 'raw': 'Medical LLMs & Benchmarks:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- WorldMedQA-V: Multimodal Medical LLM Dataset', 'raw': '- WorldMedQA-V: Multimodal Medical LLM Dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- HEALTH-PARIKSHA: RAG Models Evaluation', 'raw': '- HEALTH-PARIKSHA: RAG Models Evaluation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Synthetic Data for Medical Vision-Language', 'raw': '- Synthetic Data for Medical Vision-Language'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Now you can watch and listen to the latest Medical AI papers daily on our YouTube and Spotify channels as well!', 'raw': 'Now you can watch and listen to the latest Medical AI papers daily on our YouTube and Spotify channels as well!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Youtube: ', 'raw': '- Youtube: '}, {'type': 'link', 'href': 'https://youtu.be/LROOjWXUgvg?si=s-nNDOSD3BrsHYjQ', 'raw': 'https://youtu.be/LROOjWXUgvg?si=s-nNDOSD3BrsHYjQ'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Spotify : ', 'raw': '- Spotify : '}, {'type': 'link', 'href': 'https://open.spotify.com/episode/12xeN2vnOTRdDrHbWqhV6I?si=bd7c8d9fee8049fd', 'raw': 'https://open.spotify.com/episode/12xeN2vnOTRdDrHbWqhV6I?si=bd7c8d9fee8049fd'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Last Week in Medical AI: Top LLM Research Papers/Models 🔥 +🏅 (October 12 - October 19, 2024) + +Medical LLM & Other Models: +- OLAPH: Factual Biomedical LLM QA +- LLMD: Interpreting Longitudinal Medical Records +- LifeGPT: Generative Transformer for Cells +- MedCare: Decoupled Clinical LLM Alignment +- Y-Mol: Biomedical LLM for Drug Development + +Frameworks and Methodologies: +- MedINST: Biomedical Instructions Meta Dataset +- Democratizing Medical LLMs via Language Experts +- MCQG-SRefine: Iterative Question Generation +- Adaptive Medical Language Agents +- MeNTi: Medical LLM with Nested Tools + +Medical LLM Applications: +- AGENTiGraph: LLM Chatbots with Private Data +- MMed-RAG: Multimodal Medical RAG System +- Medical Graph RAG: Safe LLM via Retrieval +- MedAide: Multi-Agent Medical LLM Collaboration +- Synthetic Clinical Trial Generation + +Medical LLMs & Benchmarks: +- WorldMedQA-V: Multimodal Medical 
LLM Dataset +- HEALTH-PARIKSHA: RAG Models Evaluation +- Synthetic Data for Medical Vision-Language + +Now you can watch and listen to the latest Medical AI papers daily on our YouTube and Spotify channels as well! + +- Youtube: https://youtu.be/LROOjWXUgvg?si=s-nNDOSD3BrsHYjQ +- Spotify : https://open.spotify.com/episode/12xeN2vnOTRdDrHbWqhV6I?si=bd7c8d9fee8049fd + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/Vj9HYQwSyZV-zcW_b_xdh.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['aaditya', 'JoPmt', 'Joseph717171', 'VISHNUDHAT'], 'count': 4}, {'reaction': '❤️', 'users': ['aaditya', 'benhachem', 'Joseph717171', 'shetumohanto'], 'count': 4}, {'reaction': '🚀', 'users': ['aaditya', 'John6666', 'Joseph717171'], 'count': 3}, {'reaction': '🤗', 'users': ['aaditya', 'Joseph717171'], 'count': 2}, {'reaction': '🧠', 'users': ['aaditya', 'Joseph717171'], 'count': 2}, {'reaction': '🤝', 'users': ['uDivy', 'aaditya'], 'count': 2}, {'reaction': '👍', 'users': ['Leotrim'], 'count': 1}]",2024-10-20 13:49:51,2024-10-30 14:04:26.109,"[{'_id': '63901114c3f0d3c3f0cda426', 'avatarUrl': '/avatars/9df46be074977d1506b8eb8402aaec19.svg', 'fullname': 'Surbhi Sharma', 'name': 'Surbhi123', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/aaditya/126778565806623,2762,,1 +/avatars/d773a7dd9b706759131fc482ab71ced7.svg,10.0,feboris946@vndem.com,Taf2023,504398154047309,"[{'type': 'text', 'value': 'I am here to provide you with the premium codes you want just by informing me of your requirements. #program #AI #code', 'raw': 'I am here to provide you with the premium codes you want just by informing me of your requirements. #program #AI #code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Premium_Code', 'raw': 'Premium_Code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://hf.co/chat/assistant/670fd7b2ad7fdbb38ff98102', 'raw': 'https://hf.co/chat/assistant/670fd7b2ad7fdbb38ff98102'}]","I am here to provide you with the premium codes you want just by informing me of your requirements. #program #AI #code +Premium_Code +https://hf.co/chat/assistant/670fd7b2ad7fdbb38ff98102","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64841af2295256340e4b9f88/9ir5nncB5XhFRNl8Ozm9D.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['jhaayus'], 'count': 1}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-17 00:24:15,2024-10-17 00:24:15.894,[],/posts/Taf2023/504398154047309,1212,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg,1593.0,Clelia Astra Bertelli,as-cle-bert,358729012419970,"[{'type': 'text', 'value': 'Hi there HuggingFacers!', 'raw': 'Hi there HuggingFacers!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Have you ever dreamt of an improbable book crossover, like Frodo from 𝘓𝘰𝘳𝘥 𝘰𝘧 𝘵𝘩𝘦 𝘙𝘪𝘯𝘨𝘴 becoming the main character of the 𝘖𝘥𝘺𝘴𝘴𝘦𝘺 or Emma Bovary from 𝘔𝘢𝘥𝘢𝘮𝘦 𝘉𝘰𝘷𝘢𝘳𝘺 acting as a modern-day Shakespearean Juliet?', 'raw': 'Have you ever dreamt of an improbable book crossover, like Frodo from 𝘓𝘰𝘳𝘥 𝘰𝘧 𝘵𝘩𝘦 𝘙𝘪𝘯𝘨𝘴 becoming the main character of the 𝘖𝘥𝘺𝘴𝘴𝘦𝘺 or Emma Bovary from 𝘔𝘢𝘥𝘢𝘮𝘦 𝘉𝘰𝘷𝘢𝘳𝘺 acting as a modern-day Shakespearean Juliet?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Well, all of this is now possible! 
I'm thrilled to introduce my latest open-source product for storytelling: 𝐛𝐨𝐨𝐤𝐬-𝐦𝐢𝐱𝐞𝐫-𝐚𝐢 𝐯𝟎.𝟎.𝟎 !"", 'raw': ""Well, all of this is now possible! I'm thrilled to introduce my latest open-source product for storytelling: 𝐛𝐨𝐨𝐤𝐬-𝐦𝐢𝐱𝐞𝐫-𝐚𝐢 𝐯𝟎.𝟎.𝟎 !""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Built with ReactJS and shipped directly to you on Spaces thanks to Docker, this webapp combines the power of two AI tools:', 'raw': 'Built with ReactJS and shipped directly to you on Spaces thanks to Docker, this webapp combines the power of two AI tools:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- gpt-4o-mini by OpenAI, which takes care of cooking new and intriguing plots starting from the user's instructions, the titles and the summaries of the two books to mix (summaries are scraped through Wikipedia)"", 'raw': ""- gpt-4o-mini by OpenAI, which takes care of cooking new and intriguing plots starting from the user's instructions, the titles and the summaries of the two books to mix (summaries are scraped through Wikipedia)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- text2img realtime API by ModelsLab, which provides a stable diffusion pipeline to create a thumbnail for your newly-generated story', 'raw': '- text2img realtime API by ModelsLab, which provides a stable diffusion pipeline to create a thumbnail for your newly-generated story'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Everything is provided under a simple and intuitive UI, which uses chatscope's React template kit."", 'raw': ""Everything is provided under a simple and intuitive UI, which uses chatscope's React template kit.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Curious to try it? The app is already live at:', 'raw': 'Curious to try it? The app is already live at:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'as-cle-bert/books-mixer-ai'}, 'url': 'https://huggingface.co/spaces/as-cle-bert/books-mixer-ai', 'raw': 'https://huggingface.co/spaces/as-cle-bert/books-mixer-ai'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""And you can also have a tour of the GitHub repo (and leave a little ⭐ while you're there):"", 'raw': ""And you can also have a tour of the GitHub repo (and leave a little ⭐ while you're there):""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/AstraBert/books-mixer-ai', 'raw': 'https://github.com/AstraBert/books-mixer-ai'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The documentation is still under construction, but will become available soon😊', 'raw': 'The documentation is still under construction, but will become available soon😊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Have fun!📚📚', 'raw': 'Have fun!📚📚'}]","Hi there HuggingFacers! + +Have you ever dreamt of an improbable book crossover, like Frodo from 𝘓𝘰𝘳𝘥 𝘰𝘧 𝘵𝘩𝘦 𝘙𝘪𝘯𝘨𝘴 becoming the main character of the 𝘖𝘥𝘺𝘴𝘴𝘦𝘺 or Emma Bovary from 𝘔𝘢𝘥𝘢𝘮𝘦 𝘉𝘰𝘷𝘢𝘳𝘺 acting as a modern-day Shakespearean Juliet? + +Well, all of this is now possible! 
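As a hypothetical re-creation of the plot-mixing step described in this post (the app's real prompt and code live in the GitHub repo linked above; `mix_books` and the prompt wording here are illustrative only, assuming the official `openai` Python client with an OPENAI_API_KEY in the environment):

```python
# Hypothetical sketch of the gpt-4o-mini plot-mixing step; not the app's code.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

def mix_books(title_a: str, summary_a: str, title_b: str, summary_b: str, instructions: str) -> str:
    # The app feeds the two titles, their Wikipedia summaries, and the
    # user's instructions to gpt-4o-mini to generate the mixed plot.
    prompt = (
        f"Mix the plots of '{title_a}' and '{title_b}' into one new story.\n"
        f"Summary of {title_a}: {summary_a}\n"
        f"Summary of {title_b}: {summary_b}\n"
        f"Extra instructions: {instructions}"
    )
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": prompt}],
    )
    return response.choices[0].message.content
```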
I'm thrilled to introduce my latest open-source product for storytelling: 𝐛𝐨𝐨𝐤𝐬-𝐦𝐢𝐱𝐞𝐫-𝐚𝐢 𝐯𝟎.𝟎.𝟎 ! + +Built with ReactJS and shipped directly to you on Spaces thanks to Docker, this webapp combines the power of two AI tools: + +- gpt-4o-mini by OpenAI, which takes care of cooking new and intriguing plots starting from the user's instructions, the titles and the summaries of the two books to mix (summaries are scraped through Wikipedia) +- text2img realtime API by ModelsLab, which provides a stable diffusion pipeline to create a thumbnail for your newly-generated story + +Everything is provided under a simple and intuitive UI, which uses chatscope's React template kit. +Curious to try it? The app is already live at: + +https://huggingface.co/spaces/as-cle-bert/books-mixer-ai + +And you can also have a tour of the GitHub repo (and leave a little ⭐ while you're there): + +https://github.com/AstraBert/books-mixer-ai + +The documentation is still under construction, but will become available soon😊 + +Have fun!📚📚","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65e330e7edc2f7306e252448/GPJRhtNqulKsBbVgzOF42.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'iamrobotbear'], 'count': 2}, {'reaction': '🧠', 'users': ['awacke1'], 'count': 1}, {'reaction': '👍', 'users': ['introvoyz041'], 'count': 1}]",2024-10-17 00:03:03,2024-10-17 00:03:03.513,[],/posts/as-cle-bert/358729012419970,1365,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620630b603825909dcbeba35/vNlOtJqdcP3vpAfeHfNvP.jpeg,246.0,Aaron C Wacker,awacke1,847164896381837,"[{'type': 'text', 'value': ""Today I was able to solve a very difficult coding session with GPT-4o which ended up solving integrations on a very large scale. So I decided to look a bit more into how its reasoners work. Below is a fun markdown emoji outline about what I learned today and what I'm pursuing. "", 'raw': ""Today I was able to solve a very difficult coding session with GPT-4o which ended up solving integrations on a very large scale. So I decided to look a bit more into how its reasoners work. Below is a fun markdown emoji outline about what I learned today and what I'm pursuing. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hope you enjoy! Cheers, Aaron.', 'raw': 'Hope you enjoy! Cheers, Aaron.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also here are my favorite last 4 spaces I am working on:', 'raw': 'Also here are my favorite last 4 spaces I am working on:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. GPT4O: ', 'raw': '1. GPT4O: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/GPT-4o-omni-text-audio-image-video'}, 'url': 'https://huggingface.co/spaces/awacke1/GPT-4o-omni-text-audio-image-video', 'raw': 'https://huggingface.co/spaces/awacke1/GPT-4o-omni-text-audio-image-video'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Claude: ', 'raw': '2. Claude: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/AnthropicClaude3.5Sonnet-ACW'}, 'url': 'https://huggingface.co/spaces/awacke1/AnthropicClaude3.5Sonnet-ACW', 'raw': 'https://huggingface.co/spaces/awacke1/AnthropicClaude3.5Sonnet-ACW'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. 
MSGraph M365: ', 'raw': '3. MSGraph M365: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/MSGraphAPI'}, 'url': 'https://huggingface.co/spaces/awacke1/MSGraphAPI', 'raw': 'https://huggingface.co/spaces/awacke1/MSGraphAPI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Azure Cosmos DB: Now with Research AI! ', 'raw': '4. Azure Cosmos DB: Now with Research AI! '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/AzureCosmosDBUI'}, 'url': 'https://huggingface.co/spaces/awacke1/AzureCosmosDBUI', 'raw': 'https://huggingface.co/spaces/awacke1/AzureCosmosDBUI'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""# 🚀 OpenAI's O1 Models: A Quantum Leap in AI"", 'raw': ""# 🚀 OpenAI's O1 Models: A Quantum Leap in AI""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""## 1. 🤔 From 🦜 to 🧠: O1's Evolution"", 'raw': ""## 1. 🤔 From 🦜 to 🧠: O1's Evolution""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Thinking AI**: O1 ponders before replying; GPT models just predict. 💡', 'raw': '- **Thinking AI**: O1 ponders before replying; GPT models just predict. 💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 2. 📚 AI Memory: 💾 + 🧩 = 🧠', 'raw': '## 2. 📚 AI Memory: 💾 + 🧩 = 🧠'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Embeddings & Tokens**: Words ➡️ vectors, building knowledge. 📖', 'raw': '- **Embeddings & Tokens**: Words ➡️ vectors, building knowledge. 📖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 3. 🔍 Swift Knowledge Retrieval', 'raw': '## 3. 🔍 Swift Knowledge Retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Vector Search & Indexing**: O1 finds info fast, citing reliable sources. 🔎📖', 'raw': '- **Vector Search & Indexing**: O1 finds info fast, citing reliable sources. 🔎📖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 4. 🌳 Logic Trees with Mermaid Models', 'raw': '## 4. 🌳 Logic Trees with Mermaid Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Flowchart Reasoning**: O1 structures thoughts like diagrams. 🎨🌐', 'raw': '- **Flowchart Reasoning**: O1 structures thoughts like diagrams. 🎨🌐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 5. 💻 Coding Mastery', 'raw': '## 5. 💻 Coding Mastery'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Multilingual & Current**: Speaks many code languages, always up-to-date. 💻🔄', 'raw': '- **Multilingual & Current**: Speaks many code languages, always up-to-date. 💻🔄'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 6. 🏆 Breaking Records', 'raw': '## 6. 🏆 Breaking Records'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **92.3% MMLU Score**: O1 outperforms humans, setting new AI standards. 🏅', 'raw': '- **92.3% MMLU Score**: O1 outperforms humans, setting new AI standards. 
🏅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 7. 💡 Versatile Applications', 'raw': '## 7. 💡 Versatile Applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Ultimate Assistant**: From fixing code to advancing research. 🛠️🔬', 'raw': '- **Ultimate Assistant**: From fixing code to advancing research. 🛠️🔬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## 8. 🏁 Racing Toward AGI', 'raw': '## 8. 🏁 Racing Toward AGI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **OpenAI Leads**: O1 brings us closer to true AI intelligence. 🚀', 'raw': '- **OpenAI Leads**: O1 brings us closer to true AI intelligence. 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""## 9. 🤖 O1's Reasoning Pillars"", 'raw': ""## 9. 🤖 O1's Reasoning Pillars""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **🧠 Chain of Thought**: Step-by-step logic.', 'raw': '- **🧠 Chain of Thought**: Step-by-step logic.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **🎲 MCTS**: Simulates options, picks best path.', 'raw': '- **🎲 MCTS**: Simulates options, picks best path.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **🔍 Reflection**: Self-improves autonomously.', 'raw': '- **🔍 Reflection**: Self-improves autonomously.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **🏋️\u200d♂️ Reinforcement Learning**: Gets smarter over time.', 'raw': '- **🏋️\u200d♂️ Reinforcement Learning**: Gets smarter over time.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '---', 'raw': '---'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '*Stay curious, keep coding!* 🚀', 'raw': '*Stay curious, keep coding!* 🚀'}, {'type': 'new_line', 'raw': '\n'}]","Today I was able to solve a very difficult coding session with GPT-4o which ended up solving integrations on a very large scale. So I decided to look a bit more into how its reasoners work. Below is a fun markdown emoji outline about what I learned today and what I'm pursuing. + +Hope you enjoy! Cheers, Aaron. + +Also here are my favorite last 4 spaces I am working on: +1. GPT4O: https://huggingface.co/spaces/awacke1/GPT-4o-omni-text-audio-image-video +2. Claude: + https://huggingface.co/spaces/awacke1/AnthropicClaude3.5Sonnet-ACW +3. MSGraph M365: https://huggingface.co/spaces/awacke1/MSGraphAPI +4. Azure Cosmos DB: Now with Research AI! https://huggingface.co/spaces/awacke1/AzureCosmosDBUI + +# 🚀 OpenAI's O1 Models: A Quantum Leap in AI + +## 1. 🤔 From 🦜 to 🧠: O1's Evolution + +- **Thinking AI**: O1 ponders before replying; GPT models just predict. 💡 + +## 2. 📚 AI Memory: 💾 + 🧩 = 🧠 + +- **Embeddings & Tokens**: Words ➡️ vectors, building knowledge. 📖 + +## 3. 🔍 Swift Knowledge Retrieval + +- **Vector Search & Indexing**: O1 finds info fast, citing reliable sources. 🔎📖 + +## 4. 🌳 Logic Trees with Mermaid Models + +- **Flowchart Reasoning**: O1 structures thoughts like diagrams. 🎨🌐 + +## 5. 💻 Coding Mastery + +- **Multilingual & Current**: Speaks many code languages, always up-to-date. 💻🔄 + +## 6. 
🏆 Breaking Records + +- **92.3% MMLU Score**: O1 outperforms humans, setting new AI standards. 🏅 + +## 7. 💡 Versatile Applications + +- **Ultimate Assistant**: From fixing code to advancing research. 🛠️🔬 + +## 8. 🏁 Racing Toward AGI + +- **OpenAI Leads**: O1 brings us closer to true AI intelligence. 🚀 + +## 9. 🤖 O1's Reasoning Pillars + +- **🧠 Chain of Thought**: Step-by-step logic. +- **🎲 MCTS**: Simulates options, picks best path. +- **🔍 Reflection**: Self-improves autonomously. +- **🏋️‍♂️ Reinforcement Learning**: Gets smarter over time. + +--- + +*Stay curious, keep coding!* 🚀 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/w0sQsuk0PTM_cUNU8EAGS.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-16 23:20:13,2024-12-02 08:32:16.398,[],/posts/awacke1/847164896381837,721,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6435718aaaef013d1aec3b8b/XKf-8MA47tjVAM6SCX0MP.jpeg,7068.0,Bartowski,bartowski,614979545779626,"[{'type': 'text', 'value': 'In regards to the latest mistral model and GGUFs for it:', 'raw': 'In regards to the latest mistral model and GGUFs for it:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Yes, they may be subpar and may require changes to llama.cpp to support the interleaved sliding window', 'raw': 'Yes, they may be subpar and may require changes to llama.cpp to support the interleaved sliding window'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Yes, I got excited when a conversion worked and released them ASAP', 'raw': 'Yes, I got excited when a conversion worked and released them ASAP'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'That said, generation seems to work right now and seems to mimic the output from spaces that are running the original model', 'raw': 'That said, generation seems to work right now and seems to mimic the output from spaces that are running the original model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I have appended -TEST to the model names in an attempt to indicate that they are not final or perfect, but if people still feel misled and that it's not the right thing to do, please post (civilly) below your thoughts, I will highly consider pulling the conversions if that's what people think is best. 
After all, that's what I'm here for, in service to you all !""}]","In regards to the latest mistral model and GGUFs for it: + +Yes, they may be subpar and may require changes to llama.cpp to support the interleaved sliding window + +Yes, I got excited when a conversion worked and released them ASAP + +That said, generation seems to work right now and seems to mimic the output from spaces that are running the original model + +I have appended -TEST to the model names in an attempt to indicate that they are not final or perfect, but if people still feel misled and that it's not the right thing to do, please post (civilly) below your thoughts, I will highly consider pulling the conversions if that's what people think is best. After all, that's what I'm here for, in service to you all !",[],[],"[{'reaction': '❤️', 'users': ['YaTharThShaRma999', 'johnnyc3p', 'John6666', 'SporkySporkness', 'Delta-Vector', 'Firepal3D', 'vonjack', 'driib', 'vsenn', 'G30', 'bruceunx', 'victor', 'bigZos', 'Maxxim69', 'pierpaolo', 'heavensnight', 'AtAndDev'], 'count': 17}, {'reaction': '🔥', 'users': ['awacke1', 'John6666', 'Delta-Vector', 'GoDjMike', 'G30', 'AtAndDev'], 'count': 6}, {'reaction': '👍', 'users': ['Yuma42', 'AtAndDev'], 'count': 2}]",2024-10-16 22:47:30,2024-10-21 20:36:54.599,"[{'_id': '64e4eb3e68df7c48dca95271', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/fOsPIFqHjBq3gsx3_6lHd.png', 'fullname': 'Chad Canning', 'name': 'fusi0n', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '66c26b6fb01b19d8c3c2467b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66c26b6fb01b19d8c3c2467b/fCFcHHgkDBhGZvjeIIbwN.png', 'fullname': 'Mango', 'name': 'Delta-Vector', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 185, 'isFollowing': False}, {'_id': '6435718aaaef013d1aec3b8b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6435718aaaef013d1aec3b8b/XKf-8MA47tjVAM6SCX0MP.jpeg', 'fullname': 'Bartowski', 'name': 'bartowski', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7068, 'isFollowing': False}, {'_id': '657dec9df4f72f2c4c1a5761', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/iB4Kdg31amSkGhSik6fv2.jpeg', 'fullname': 'Jörmungandr', 'name': 'Midgardsormr', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '6342f2367bdcbf6a1cb4b245', 'avatarUrl': '/avatars/c53d7b7ee7bf757dea5f847e62ea96b9.svg', 'fullname': 'Ding Dong', 'name': 'xxx31dingdong', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/bartowski/614979545779626,23928,,6 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,271267147746650,"[{'type': 'text', 'value': '🔥 Meta AI just blessed us with CoTracker v3, bleeding edge point tracking foundation model 🤩', 'raw': '🔥 Meta AI just blessed us with CoTracker v3, bleeding edge point tracking foundation model 🤩'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'model: ', 'raw': 'model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'facebook/cotracker3'}, 'url': 'https://huggingface.co/facebook/cotracker3', 'raw': 
'https://huggingface.co/facebook/cotracker3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'demo: ', 'raw': 'demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'facebook/cotracker'}, 'url': 'https://huggingface.co/spaces/facebook/cotracker', 'raw': 'https://huggingface.co/spaces/facebook/cotracker'}]","🔥 Meta AI just blessed us with CoTracker v3, bleeding edge point tracking foundation model 🤩 +model: https://huggingface.co/facebook/cotracker3 +demo: https://huggingface.co/spaces/facebook/cotracker",[],[],"[{'reaction': '👀', 'users': ['John6666', 'jgitsolutions', 'KvrParaskevi', 'Rostenbach', 'LeonceNsh'], 'count': 5}, {'reaction': '🚀', 'users': ['Ar4ikov'], 'count': 1}]",2024-10-16 21:00:37,2024-10-16 21:00:37.858,[],/posts/merve/271267147746650,1817,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,972049092550324,"[{'type': 'text', 'value': 'Today I found out about the existence of ', 'raw': 'Today I found out about the existence of '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'utter-project/EuroLLM-1.7B-Instruct'}, 'url': 'https://huggingface.co/utter-project/EuroLLM-1.7B-Instruct', 'raw': 'https://huggingface.co/utter-project/EuroLLM-1.7B-Instruct'}, {'type': 'text', 'value': "" and unexpectedly it is really good. I think it's a very underrated model - give it a try "", 'raw': "" and unexpectedly it is really good. I think it's a very underrated model - give it a try ""}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'nyuuzyou/EuroLLM-1.7B-Instruct'}, 'url': 'https://huggingface.co/spaces/nyuuzyou/EuroLLM-1.7B-Instruct', 'raw': 'https://huggingface.co/spaces/nyuuzyou/EuroLLM-1.7B-Instruct'}]",Today I found out about the existence of https://huggingface.co/utter-project/EuroLLM-1.7B-Instruct and unexpectedly it is really good. I think it's a very underrated model - give it a try https://huggingface.co/spaces/nyuuzyou/EuroLLM-1.7B-Instruct,[],[],"[{'reaction': '👀', 'users': ['John6666', 'attashe'], 'count': 2}, {'reaction': '👍', 'users': ['prithivMLmods'], 'count': 1}]",2024-10-16 20:12:18,2024-10-16 20:12:18.711,[],/posts/nyuuzyou/972049092550324,845,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png,159.0,Richard A Aragon,TuringsSolutions,483705235958803,"[{'type': 'text', 'value': 'Ever wondered how neural networks actually work under the hood? ', 'raw': 'Ever wondered how neural networks actually work under the hood? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In my latest video, I break down the core mathematical concepts behind neural networks in a way that's easy for IT professionals to understand. We'll explore:"", 'raw': ""In my latest video, I break down the core mathematical concepts behind neural networks in a way that's easy for IT professionals to understand. 
We'll explore:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Neurons as logic gates', 'raw': '- Neurons as logic gates'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Weighted sums and activation functions', 'raw': '- Weighted sums and activation functions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Gradient descent and backpropagation', 'raw': '- Gradient descent and backpropagation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'No complex equations or jargon, just clear explanations and helpful visuals! ', 'raw': 'No complex equations or jargon, just clear explanations and helpful visuals! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Watch now and unlock the mysteries of neural networks: ', 'raw': '➡️ Watch now and unlock the mysteries of neural networks: '}, {'type': 'link', 'href': 'https://youtu.be/L5_I1ZHoGnM', 'raw': 'https://youtu.be/L5_I1ZHoGnM'}]","Ever wondered how neural networks actually work under the hood? + +In my latest video, I break down the core mathematical concepts behind neural networks in a way that's easy for IT professionals to understand. We'll explore: + +- Neurons as logic gates +- Weighted sums and activation functions +- Gradient descent and backpropagation + +No complex equations or jargon, just clear explanations and helpful visuals! + +➡️ Watch now and unlock the mysteries of neural networks: https://youtu.be/L5_I1ZHoGnM",[],[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'crystal99', 'Joseph717171'], 'count': 3}, {'reaction': '😔', 'users': ['takeraparterer'], 'count': 1}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '👍', 'users': ['ZeroWw'], 'count': 1}]",2024-10-16 17:42:37,2024-10-16 17:42:37.969,[],/posts/TuringsSolutions/483705235958803,1390,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63171caf1cc81c5e95ed7b92/29I5Lr0vLRcQR7AfCZcYj.jpeg,14.0,Akim Mousterou,AkimfromParis,843028961140234,"[{'type': 'text', 'value': 'Philosopher Gilles Deleuze in 1985-86 about society of control, probabilities, and power. Visionary words in an era of autoregressive models:', 'raw': 'Philosopher Gilles Deleuze in 1985-86 about society of control, probabilities, and power. Visionary words in an era of autoregressive models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '""The biopolitics of populations appears when right sets about administering life, says Foucault, administering life in any open multiplicities whatever. You see the importance of the difference between discipline and biopolitics. The one is in an open space, with large multiplicities to which limits are not assignable. They can only be treated by the calculus of probabilities, hence the development of the calculus of probabilities and the meaning [sens] of the social control of probabilities, the probabilities of marriage in a nation, the probabilities of mortality, probabilities of natality. Natality, nuptiality, mortality …', 'raw': '""The biopolitics of populations appears when right sets about administering life, says Foucault, administering life in any open multiplicities whatever. You see the importance of the difference between discipline and biopolitics. The one is in an open space, with large multiplicities to which limits are not assignable. 
They can only be treated by the calculus of probabilities, hence the development of the calculus of probabilities and the meaning [sens] of the social control of probabilities, the probabilities of marriage in a nation, the probabilities of mortality, probabilities of natality. Natality, nuptiality, mortality …'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '... When Foucault directly addresses the question of power, namely, one of his great theses: no, power does not repress, or it represses only secondarily. What does it do? It does something much more profound and, doubtless, more formidable than repressing: it forms, it shapes. It does not silence, it does worse: it makes speak. It disciplines, it standardizes [normalise]. But repression is entirely secondary in relation to the positive operations of power.', 'raw': '... When Foucault directly addresses the question of power, namely, one of his great theses: no, power does not repress, or it represses only secondarily. What does it do? It does something much more profound and, doubtless, more formidable than repressing: it forms, it shapes. It does not silence, it does worse: it makes speak. It disciplines, it standardizes [normalise]. But repression is entirely secondary in relation to the positive operations of power.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Power does not repress, it disciplines, it manages, it controls, it standardizes, etcetera. It does not silence, it makes speak. It does not prevent acting, it makes act."" ', 'raw': 'Power does not repress, it disciplines, it manages, it controls, it standardizes, etcetera. It does not silence, it makes speak. It does not prevent acting, it makes act."" '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'From the Deleuze Seminars at Université Paris 8 translated by Purdue University -> ', 'raw': 'From the Deleuze Seminars at Université Paris 8 translated by Purdue University -> '}, {'type': 'link', 'href': 'https://deleuze.cla.purdue.edu/', 'raw': 'https://deleuze.cla.purdue.edu/'}]","Philosopher Gilles Deleuze in 1985-86 about society of control, probabilities, and power. Visionary words in an era of autoregressive models: + +""The biopolitics of populations appears when right sets about administering life, says Foucault, administering life in any open multiplicities whatever. You see the importance of the difference between discipline and biopolitics. The one is in an open space, with large multiplicities to which limits are not assignable. They can only be treated by the calculus of probabilities, hence the development of the calculus of probabilities and the meaning [sens] of the social control of probabilities, the probabilities of marriage in a nation, the probabilities of mortality, probabilities of natality. Natality, nuptiality, mortality … + +... When Foucault directly addresses the question of power, namely, one of his great theses: no, power does not repress, or it represses only secondarily. What does it do? It does something much more profound and, doubtless, more formidable than repressing: it forms, it shapes. It does not silence, it does worse: it makes speak. It disciplines, it standardizes [normalise]. But repression is entirely secondary in relation to the positive operations of power. + +Power does not repress, it disciplines, it manages, it controls, it standardizes, etcetera. 
It does not silence, it makes speak. It does not prevent acting, it makes act."" + +From the Deleuze Seminars at Université Paris 8 translated by Purdue University -> https://deleuze.cla.purdue.edu/",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-16 13:44:16,2024-10-16 13:44:16.061,[],/posts/AkimfromParis/843028961140234,628,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,600930862864914,"[{'type': 'text', 'value': 'The Synthetic Data Generator now directly integrates with Argilla, so you can generate and curate your own high-quality datasets from pure natural language!', 'raw': 'The Synthetic Data Generator now directly integrates with Argilla, so you can generate and curate your own high-quality datasets from pure natural language!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Up next -> include dataset generation for text classification.', 'raw': 'Up next -> include dataset generation for text classification.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Other suggestions? Let us know.', 'raw': 'Other suggestions? Let us know.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Space: ', 'raw': 'Space: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/argilla/synthetic-data-generator', 'raw': 'https://huggingface.co/spaces/argilla/synthetic-data-generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","The Synthetic Data Generator now directly integrates with Argilla, so you can generate and curate your own high-quality datasets from pure natural language! + +Up next -> include dataset generation for text classification. +Other suggestions? Let us know. + +Space: https://huggingface.co/spaces/argilla/synthetic-data-generator + + +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/634ff41ff32062e9eb7b06a3/e1a4UwPu_AchxuoOSkTBg.mp4'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'davidberenstein1957', 'clem'], 'count': 3}]",2024-10-16 10:42:56,2024-10-16 10:42:56.017,[],/posts/davidberenstein1957/600930862864914,686,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,605522997356241,"[{'type': 'text', 'value': 'While Google\'s Transformer might have introduced ""Attention is all you need,"" Microsoft and Tsinghua University are here with the DIFF Transformer, stating, ""Sparse-Attention is all you need.""', 'raw': 'While Google\'s Transformer might have introduced ""Attention is all you need,"" Microsoft and Tsinghua University are here with the DIFF Transformer, stating, ""Sparse-Attention is all you need.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The DIFF Transformer outperforms traditional Transformers in scaling properties, requiring only about 65% of the model size or training tokens to achieve comparable performance.', 'raw': 'The DIFF Transformer outperforms traditional Transformers in scaling properties, requiring only about 65% of the model size or training tokens to achieve comparable performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The secret sauce? 
A differential attention mechanism that amplifies focus on relevant context while canceling out noise, leading to sparser and more effective attention patterns.', 'raw': 'The secret sauce? A differential attention mechanism that amplifies focus on relevant context while canceling out noise, leading to sparser and more effective attention patterns.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How?', 'raw': 'How?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- It uses two separate softmax attention maps and subtracts them.', 'raw': '- It uses two separate softmax attention maps and subtracts them.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- It employs a learnable scalar λ for balancing the attention maps.', 'raw': '- It employs a learnable scalar λ for balancing the attention maps.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- It implements GroupNorm for each attention head independently.', 'raw': '- It implements GroupNorm for each attention head independently.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- It is compatible with FlashAttention for efficient computation.', 'raw': '- It is compatible with FlashAttention for efficient computation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What do you get?', 'raw': 'What do you get?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Superior long-context modeling (up to 64K tokens).', 'raw': '- Superior long-context modeling (up to 64K tokens).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Enhanced key information retrieval.', 'raw': '- Enhanced key information retrieval.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Reduced hallucination in question-answering and summarization tasks.', 'raw': '- Reduced hallucination in question-answering and summarization tasks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- More robust in-context learning, less affected by prompt order.', 'raw': '- More robust in-context learning, less affected by prompt order.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Mitigation of activation outliers, opening doors for efficient quantization.', 'raw': '- Mitigation of activation outliers, opening doors for efficient quantization.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Extensive experiments show DIFF Transformer's advantages across various tasks and model sizes, from 830M to 13.1B parameters."", 'raw': ""Extensive experiments show DIFF Transformer's advantages across various tasks and model sizes, from 830M to 13.1B parameters.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This innovative architecture could be a game-changer for the next generation of LLMs. What are your thoughts on DIFF Transformer's potential impact?"", 'raw': ""This innovative architecture could be a game-changer for the next generation of LLMs. 
What are your thoughts on DIFF Transformer's potential impact?""}]","While Google's Transformer might have introduced ""Attention is all you need,"" Microsoft and Tsinghua University are here with the DIFF Transformer, stating, ""Sparse-Attention is all you need."" + +The DIFF Transformer outperforms traditional Transformers in scaling properties, requiring only about 65% of the model size or training tokens to achieve comparable performance. + +The secret sauce? A differential attention mechanism that amplifies focus on relevant context while canceling out noise, leading to sparser and more effective attention patterns. + +How? +- It uses two separate softmax attention maps and subtracts them. +- It employs a learnable scalar λ for balancing the attention maps. +- It implements GroupNorm for each attention head independently. +- It is compatible with FlashAttention for efficient computation. + +What do you get? +- Superior long-context modeling (up to 64K tokens). +- Enhanced key information retrieval. +- Reduced hallucination in question-answering and summarization tasks. +- More robust in-context learning, less affected by prompt order. +- Mitigation of activation outliers, opening doors for efficient quantization. + +Extensive experiments show DIFF Transformer's advantages across various tasks and model sizes, from 830M to 13.1B parameters. + +This innovative architecture could be a game-changer for the next generation of LLMs. What are your thoughts on DIFF Transformer's potential impact?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/BewC28D4diwrp34EP85pF.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'privategeek24', 'calmodovar', 'den0620', 'Hampetiudo', 'DeathGodlike', 'cctuan'], 'count': 7}]",2024-10-16 08:04:37,2024-10-16 21:48:03.346,"[{'_id': '64137e2150358a805203cbac', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64137e2150358a805203cbac/w9RQx8Q07UvgFyIZ3ce_k.jpeg', 'fullname': 'Jade', 'name': 'euclaise', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 102, 'isFollowing': False}]",/posts/singhsidhukuldeep/605522997356241,2166,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,241931071917176,"[{'type': 'text', 'value': 'Some exciting news...', 'raw': 'Some exciting news...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We are open-sourcing The Little Book of ML Metrics! 🎉', 'raw': 'We are open-sourcing The Little Book of ML Metrics! 
🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The book that will be on every data scientist's desk is open source."", 'raw': ""The book that will be on every data scientist's desk is open source.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What does that mean?', 'raw': 'What does that mean?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It means hundreds of people can review it, contribute to it, and help us improve it before it's finished!"", 'raw': ""It means hundreds of people can review it, contribute to it, and help us improve it before it's finished!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This also means that everyone will have free access to the digital version!', 'raw': 'This also means that everyone will have free access to the digital version!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Meanwhile, the high-quality printed edition will be available for purchase as it has been for a while. ', 'raw': 'Meanwhile, the high-quality printed edition will be available for purchase as it has been for a while. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Revenue from printed copies will help us support further development and maintenance of the book. Not to mention that reviewers and contributors will receive revenue sharing through their affiliate links. 🙌', 'raw': 'Revenue from printed copies will help us support further development and maintenance of the book. Not to mention that reviewers and contributors will receive revenue sharing through their affiliate links. 🙌'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the book repo (make sure to leave a star 🌟): ', 'raw': 'Check out the book repo (make sure to leave a star 🌟): '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/NannyML/The-Little-Book-of-ML-Metrics', 'raw': 'https://github.com/NannyML/The-Little-Book-of-ML-Metrics'}]","Some exciting news... + +We are open-sourcing The Little Book of ML Metrics! 🎉 + +The book that will be on every data scientist's desk is open source. + +What does that mean? + +It means hundreds of people can review it, contribute to it, and help us improve it before it's finished! + +This also means that everyone will have free access to the digital version! + +Meanwhile, the high-quality printed edition will be available for purchase as it has been for a while. + +Revenue from printed copies will help us support further development and maintenance of the book. Not to mention that reviewers and contributors will receive revenue sharing through their affiliate links. 
🙌 + +Check out the book repo (make sure to leave a star 🌟): + +https://github.com/NannyML/The-Little-Book-of-ML-Metrics","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/ULtWYaBatSiJtpnfaNySQ.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-11 16:12:21,2024-10-11 16:12:21.741,[],/posts/santiviquez/241931071917176,496,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1605114051380-noauth.jpeg,315.0,Jeff Boudier,jeffboudier,829517500759566,"[{'type': 'text', 'value': 'This week in Inference Endpoints - thx ', 'raw': 'This week in Inference Endpoints - thx '}, {'type': 'mention', 'user': 'erikkaum', 'raw': '@erikkaum'}, {'type': 'text', 'value': ' for the update!', 'raw': ' for the update!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👀 ', 'raw': '👀 '}, {'type': 'link', 'href': 'https://huggingface.co/blog/erikkaum/endpoints-changelog', 'raw': 'https://huggingface.co/blog/erikkaum/endpoints-changelog'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","This week in Inference Endpoints - thx @erikkaum for the update! + +👀 https://huggingface.co/blog/erikkaum/endpoints-changelog ",[],"[{'_id': '63148c4db031f7b1c7bc36f9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63148c4db031f7b1c7bc36f9/sl0HUVNI0G_yfScJB38yZ.jpeg', 'fullname': 'Erik Kaunismäki', 'name': 'erikkaum', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 46}]","[{'reaction': '🚀', 'users': ['John6666'], 'count': 1}, {'reaction': '👍', 'users': ['John6666'], 'count': 1}, {'reaction': '🔥', 'users': ['erikkaum'], 'count': 1}, {'reaction': '❤️', 'users': ['adamelliotfields'], 'count': 1}]",2024-10-11 16:11:53,2024-10-12 09:40:04.987,"[{'_id': '63148c4db031f7b1c7bc36f9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63148c4db031f7b1c7bc36f9/sl0HUVNI0G_yfScJB38yZ.jpeg', 'fullname': 'Erik Kaunismäki', 'name': 'erikkaum', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 46, 'isFollowing': False}]",/posts/jeffboudier/829517500759566,1113,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,418580937119777,"[{'type': 'text', 'value': '🎓 Introducing Lusana.ru Presentations Dataset - ', 'raw': '🎓 Introducing Lusana.ru Presentations Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/lusana'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/lusana', 'raw': 'https://huggingface.co/datasets/nyuuzyou/lusana'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset highlights:', 'raw': 'Dataset highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 38,953 presentations from lusana.ru, a platform for storing presentations, reports, templates, and backgrounds', 'raw': '- 38,953 presentations from lusana.ru, a platform for storing presentations, reports, templates, and backgrounds'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Primarily in Russian, with some English and potentially other languages', 'raw': '- Primarily in Russian, with some English and potentially other languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Each entry includes: ID, 
title, download URL, uniqueness score, number of slides, views, downloads, file size, file path, and extracted text content (where available)', 'raw': '- Each entry includes: ID, title, download URL, uniqueness score, number of slides, views, downloads, file size, file path, and extracted text content (where available)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Contains original PPT/PPTX files in addition to metadata', 'raw': '- Contains original PPT/PPTX files in addition to metadata'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Data covers a wide range of topics and presentation materials', 'raw': '- Data covers a wide range of topics and presentation materials'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Licensed under Creative Commons Attribution-NonCommercial 3.0 Unported (CC BY-NC 3.0)', 'raw': '- Licensed under Creative Commons Attribution-NonCommercial 3.0 Unported (CC BY-NC 3.0)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The dataset can be used for analyzing presentation content in Russian and other languages, text classification tasks, and information retrieval systems. It's also valuable for examining trends in educational and professional presentation materials and sharing practices in the Russian-speaking community. The inclusion of original files allows for in-depth analysis of presentation formats and structures."", 'raw': ""The dataset can be used for analyzing presentation content in Russian and other languages, text classification tasks, and information retrieval systems. It's also valuable for examining trends in educational and professional presentation materials and sharing practices in the Russian-speaking community. The inclusion of original files allows for in-depth analysis of presentation formats and structures.""}]","🎓 Introducing Lusana.ru Presentations Dataset - https://huggingface.co/datasets/nyuuzyou/lusana + +Dataset highlights: +- 38,953 presentations from lusana.ru, a platform for storing presentations, reports, templates, and backgrounds +- Primarily in Russian, with some English and potentially other languages +- Each entry includes: ID, title, download URL, uniqueness score, number of slides, views, downloads, file size, file path, and extracted text content (where available) +- Contains original PPT/PPTX files in addition to metadata +- Data covers a wide range of topics and presentation materials +- Licensed under Creative Commons Attribution-NonCommercial 3.0 Unported (CC BY-NC 3.0) + +The dataset can be used for analyzing presentation content in Russian and other languages, text classification tasks, and information retrieval systems. It's also valuable for examining trends in educational and professional presentation materials and sharing practices in the Russian-speaking community. 
The inclusion of original files allows for in-depth analysis of presentation formats and structures.",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-11 15:13:01,2024-10-11 15:13:01.887,[],/posts/nyuuzyou/418580937119777,346,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,802613539599324,"[{'type': 'text', 'value': 'This is not a drill 💥', 'raw': 'This is not a drill 💥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'HuggingChat is now multimodal with ', 'raw': 'HuggingChat is now multimodal with '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'meta-llama/Llama-3.2-11B-Vision-Instruct'}, 'url': 'https://huggingface.co/meta-llama/Llama-3.2-11B-Vision-Instruct', 'raw': 'https://huggingface.co/meta-llama/Llama-3.2-11B-Vision-Instruct'}, {'type': 'text', 'value': '! 🤗', 'raw': '! 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This also comes with multimodal assistants, I have migrated my Marcus Aurelius advice assistant to Llama-Vision and Marcus can see now! 😄', 'raw': 'This also comes with multimodal assistants, I have migrated my Marcus Aurelius advice assistant to Llama-Vision and Marcus can see now! 😄'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Chat with Marcus: ', 'raw': 'Chat with Marcus: '}, {'type': 'link', 'href': 'https://hf.co/chat/assistant/65bfed22022ba290531112f8', 'raw': 'https://hf.co/chat/assistant/65bfed22022ba290531112f8'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Start chatting with Llama-Vision 3.2 11B Instruct ', 'raw': 'Start chatting with Llama-Vision 3.2 11B Instruct '}, {'type': 'link', 'href': 'https://huggingface.co/chat/models/meta-llama/Llama-3.2-11B-Vision-Instruct', 'raw': 'https://huggingface.co/chat/models/meta-llama/Llama-3.2-11B-Vision-Instruct'}]","This is not a drill 💥 +HuggingChat is now multimodal with https://huggingface.co/meta-llama/Llama-3.2-11B-Vision-Instruct! 🤗 +This also comes with multimodal assistants, I have migrated my Marcus Aurelius advice assistant to Llama-Vision and Marcus can see now! 
😄 + +Chat with Marcus: https://hf.co/chat/assistant/65bfed22022ba290531112f8 +Start chatting with Llama-Vision 3.2 11B Instruct https://huggingface.co/chat/models/meta-llama/Llama-3.2-11B-Vision-Instruct","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/oIGjs1hrJsEWD67huwkeK.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['Csplk', 'John6666', 'taufiqdp', 'emreozer', 'Walmart-the-bag', 'den0620', 'alielfilali01', 'LeonceNsh', 'baratpaim'], 'count': 9}, {'reaction': '❤️', 'users': ['ijohn07', 'BUAADreamer', 'alielfilali01', 'sikang99'], 'count': 4}, {'reaction': '🔥', 'users': ['nazimali'], 'count': 1}]",2024-10-11 10:04:55,2024-10-11 12:00:44.917,"[{'_id': '6708f203091966a0881537d2', 'avatarUrl': '/avatars/9f323b99bc740bd31725a9559404e9a5.svg', 'fullname': 'Ross', 'name': 'JamesRoss99', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/merve/802613539599324,2857,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png,2266.0,Tom Aarsen,tomaarsen,745802404696558,"[{'type': 'text', 'value': '📣 Sentence Transformers v3.2.0 is out, marking the biggest release for inference in 2 years! 2 new backends for embedding models: ONNX (+ optimization & quantization) and OpenVINO, allowing for speedups up to 2x-3x AND Static Embeddings for 500x speedups at 10-20% accuracy cost. ', 'raw': '📣 Sentence Transformers v3.2.0 is out, marking the biggest release for inference in 2 years! 2 new backends for embedding models: ONNX (+ optimization & quantization) and OpenVINO, allowing for speedups up to 2x-3x AND Static Embeddings for 500x speedups at 10-20% accuracy cost. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ ONNX Backend: This backend uses the ONNX Runtime to accelerate model inference on both CPU and GPU, reaching up to 1.4x-3x speedup depending on the precision. We also introduce 2 helper methods for optimizing and quantizing models for (much) faster inference. ', 'raw': '1️⃣ ONNX Backend: This backend uses the ONNX Runtime to accelerate model inference on both CPU and GPU, reaching up to 1.4x-3x speedup depending on the precision. We also introduce 2 helper methods for optimizing and quantizing models for (much) faster inference. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ OpenVINO Backend: This backend uses Intel their OpenVINO instead, outperforming ONNX in some situations on CPU.', 'raw': '2️⃣ OpenVINO Backend: This backend uses Intel their OpenVINO instead, outperforming ONNX in some situations on CPU.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Usage is as simple as ', 'raw': 'Usage is as simple as '}, {'type': 'inline_code', 'code': 'SentenceTransformer(""all-MiniLM-L6-v2"", backend=""onnx"")', 'raw': '`SentenceTransformer(""all-MiniLM-L6-v2"", backend=""onnx"")`'}, {'type': 'text', 'value': "". Does your model not have an ONNX or OpenVINO file yet? No worries - it'll be autoexported for you. Thank me later 😉"", 'raw': "". Does your model not have an ONNX or OpenVINO file yet? No worries - it'll be autoexported for you. Thank me later 😉""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🔒 Another major new feature is Static Embeddings: think word embeddings like GLoVe and word2vec, but modernized. 
Static Embeddings are bags of token embeddings that are summed together to create text embeddings, allowing for lightning-fast embeddings that don't require any neural networks. They're initialized in one of 2 ways:"", 'raw': ""🔒 Another major new feature is Static Embeddings: think word embeddings like GLoVe and word2vec, but modernized. Static Embeddings are bags of token embeddings that are summed together to create text embeddings, allowing for lightning-fast embeddings that don't require any neural networks. They're initialized in one of 2 ways:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ via Model2Vec, a new technique for distilling any Sentence Transformer models into static embeddings. Either via a pre-distilled model with ', 'raw': '1️⃣ via Model2Vec, a new technique for distilling any Sentence Transformer models into static embeddings. Either via a pre-distilled model with '}, {'type': 'inline_code', 'code': 'from_model2vec', 'raw': '`from_model2vec`'}, {'type': 'text', 'value': ' or with ', 'raw': ' or with '}, {'type': 'inline_code', 'code': 'from_distillation', 'raw': '`from_distillation`'}, {'type': 'text', 'value': "" where you do the distillation yourself. It'll only take 5 seconds on GPU & 2 minutes on CPU, no dataset needed."", 'raw': "" where you do the distillation yourself. It'll only take 5 seconds on GPU & 2 minutes on CPU, no dataset needed.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Random initialization. This requires finetuning, but finetuning is extremely quick (e.g. I trained with 3 million pairs in 7 minutes). My final model was 6.6% worse than bge-base-en-v1.5, but 500x faster on CPU.', 'raw': '2️⃣ Random initialization. This requires finetuning, but finetuning is extremely quick (e.g. I trained with 3 million pairs in 7 minutes). My final model was 6.6% worse than bge-base-en-v1.5, but 500x faster on CPU.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full release notes: ', 'raw': 'Full release notes: '}, {'type': 'link', 'href': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v3.2.0', 'raw': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v3.2.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Documentation on Speeding up Inference: ', 'raw': 'Documentation on Speeding up Inference: '}, {'type': 'link', 'href': 'https://sbert.net/docs/sentence_transformer/usage/efficiency.html', 'raw': 'https://sbert.net/docs/sentence_transformer/usage/efficiency.html'}]","📣 Sentence Transformers v3.2.0 is out, marking the biggest release for inference in 2 years! 2 new backends for embedding models: ONNX (+ optimization & quantization) and OpenVINO, allowing for speedups up to 2x-3x AND Static Embeddings for 500x speedups at 10-20% accuracy cost. + +1️⃣ ONNX Backend: This backend uses the ONNX Runtime to accelerate model inference on both CPU and GPU, reaching up to 1.4x-3x speedup depending on the precision. We also introduce 2 helper methods for optimizing and quantizing models for (much) faster inference. +2️⃣ OpenVINO Backend: This backend uses Intel their OpenVINO instead, outperforming ONNX in some situations on CPU. + +Usage is as simple as `SentenceTransformer(""all-MiniLM-L6-v2"", backend=""onnx"")`. Does your model not have an ONNX or OpenVINO file yet? No worries - it'll be autoexported for you. 
Thank me later 😉 + +🔒 Another major new feature is Static Embeddings: think word embeddings like GLoVe and word2vec, but modernized. Static Embeddings are bags of token embeddings that are summed together to create text embeddings, allowing for lightning-fast embeddings that don't require any neural networks. They're initialized in one of 2 ways: + +1️⃣ via Model2Vec, a new technique for distilling any Sentence Transformer models into static embeddings. Either via a pre-distilled model with `from_model2vec` or with `from_distillation` where you do the distillation yourself. It'll only take 5 seconds on GPU & 2 minutes on CPU, no dataset needed. +2️⃣ Random initialization. This requires finetuning, but finetuning is extremely quick (e.g. I trained with 3 million pairs in 7 minutes). My final model was 6.6% worse than bge-base-en-v1.5, but 500x faster on CPU. + +Full release notes: https://github.com/UKPLab/sentence-transformers/releases/tag/v3.2.0 +Documentation on Speeding up Inference: https://sbert.net/docs/sentence_transformer/usage/efficiency.html","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/7yCQOHRtsylLFshQgWwD_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/d4NzK9ortnclhR_Mu3N56.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/3nvhZLPTM7VpnzRwBcD8a.png'}]",[],"[{'reaction': '🔥', 'users': ['osanseviero', 'YaTharThShaRma999', 'philipp-zettl', 'Kutches', 'DmitryRyumin', 'RaulQF', 'do-me', 'Stopwolf', 'mlabonne', 'WaveCut', 'Joseph717171', 'cstr', 'codito', 'gabrielmbmb', 'Tom-Neverwinter', 'richardlian', 'AaronBrown', 'abdullahalzubaer', 'shtefcs', 'louisbrulenaudet', 'aklepikov', 'tuantm', 'DamarJati'], 'count': 23}, {'reaction': '❤️', 'users': ['Svngoku', 'lucagychen', 'Tom-Neverwinter', 'syedia', 'AaronBrown', 'abdullah', 'abdullahalzubaer', 'shtefcs', 'thomas-mayne', 'louisbrulenaudet', 'hfposts', 'Manel', 'jeewanjoga'], 'count': 13}, {'reaction': '🚀', 'users': ['osanseviero', 'YaTharThShaRma999', 'John6666', 'Joseph717171', 'Tom-Neverwinter', 'Siddish', 'abdullahalzubaer', 'shtefcs', 'NickyNicky'], 'count': 9}]",2024-10-10 18:30:03,2024-10-11 23:11:48.122,"[{'_id': '6708d10f793a1fcd6c7633fb', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/TwR65k1JgO_t3l4pM1UjA.png', 'fullname': 'Stefan Smiljkovic', 'name': 'shtefcs', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 96, 'isFollowing': False}]",/posts/tomaarsen/745802404696558,7189,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65d50e9ef9cbfa798c590004/FlVe8chafigMfrPpMeJRL.jpeg,133.0,Jared Sulzdorf,jsulz,109226691752296,"[{'type': 'text', 'value': 'The Hugging Face Hub hosts over 1.5M Model, Dataset, and Space repositories. To scale to 10M+, the XetHub team (', 'raw': 'The Hugging Face Hub hosts over 1.5M Model, Dataset, and Space repositories. 
To scale to 10M+, the XetHub team ('}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'xet-team'}, 'url': 'https://huggingface.co/xet-team', 'raw': 'https://huggingface.co/xet-team', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66b05ca6e7c57eac7cafbbc4/f-BRRaSr0QLq3nHlLqD3o.png'}, {'type': 'text', 'value': ') is replacing Git LFS with a new technology that improves storage and transfer capabilities with some future developer experience benefits to boot.', 'raw': ') is replacing Git LFS with a new technology that improves storage and transfer capabilities with some future developer experience benefits to boot.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks to ', 'raw': 'Thanks to '}, {'type': 'mention', 'user': 'yuchenglow', 'raw': '@yuchenglow'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'port8080', 'raw': '@port8080'}, {'type': 'text', 'value': ' (for their analysis covering LFS usage from March 2022–Sept 2024), we now have insights into what we’re storing. Check out the Gradio app to explore:', 'raw': ' (for their analysis covering LFS usage from March 2022–Sept 2024), we now have insights into what we’re storing. Check out the Gradio app to explore:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Storage growth over time', 'raw': '- Storage growth over time'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- File types over all repositories', 'raw': '- File types over all repositories'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Some simple optimizations we're investigating"", 'raw': ""- Some simple optimizations we're investigating""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'xet-team/lfs-analysis'}, 'url': 'https://huggingface.co/spaces/xet-team/lfs-analysis', 'raw': 'https://huggingface.co/spaces/xet-team/lfs-analysis'}]","The Hugging Face Hub hosts over 1.5M Model, Dataset, and Space repositories. To scale to 10M+, the XetHub team (https://huggingface.co/xet-team) is replacing Git LFS with a new technology that improves storage and transfer capabilities with some future developer experience benefits to boot. + +Thanks to @yuchenglow and @port8080 (for their analysis covering LFS usage from March 2022–Sept 2024), we now have insights into what we’re storing. 
Check out the Gradio app to explore: +- Storage growth over time +- File types over all repositories +- Some simple optimizations we're investigating + +https://huggingface.co/spaces/xet-team/lfs-analysis",[],"[{'_id': '65e77dcc714ce98ddd82568e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65e77dcc714ce98ddd82568e/KhIkyM1Hc00t3zAqIaDoH.jpeg', 'fullname': 'Banerjee', 'name': 'port8080', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 28}, {'_id': '66ac094a8fc00b5c160d7da4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66ac094a8fc00b5c160d7da4/1-DnsQ0zlyTA-18bncHbt.jpeg', 'fullname': 'yuchenglow', 'name': 'yuchenglow', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 50}]","[{'reaction': '🔥', 'users': ['erinys', 'John6666', 'Nymbo'], 'count': 3}, {'reaction': '🤗', 'users': ['Aurelien-Morgan'], 'count': 1}]",2024-10-10 18:12:21,2024-10-10 18:12:21.229,[],/posts/jsulz/109226691752296,1689,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg,3949.0,Victor Mustar,victor,809715889668727,"[{'type': 'text', 'value': 'NEW - Inference Playground', 'raw': 'NEW - Inference Playground'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Maybe like me you have always wanted a super easy way to compare llama3.2-1B vs. llama3.2-3B? or the same model with different temperatures?', 'raw': 'Maybe like me you have always wanted a super easy way to compare llama3.2-1B vs. llama3.2-3B? or the same model with different temperatures?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Trying and comparing warm Inference API models has never been easier!', 'raw': 'Trying and comparing warm Inference API models has never been easier!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just go to ', 'raw': 'Just go to '}, {'type': 'link', 'href': 'https://hf.co/playground', 'raw': 'https://hf.co/playground'}, {'type': 'text', 'value': "", set your token and you're ready to go."", 'raw': "", set your token and you're ready to go.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We'll keep improving, feedback welcome 😊"", 'raw': ""We'll keep improving, feedback welcome 😊""}]","NEW - Inference Playground + +Maybe like me you have always wanted a super easy way to compare llama3.2-1B vs. llama3.2-3B? or the same model with different temperatures? + +Trying and comparing warm Inference API models has never been easier! +Just go to https://hf.co/playground, set your token and you're ready to go. 
+We'll keep improving, feedback welcome 😊","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/JYpXQyk7kl1j17U9-Ry5c.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/RXIlAUQMJswSSh-0i8etk.png'}]",[],"[{'reaction': '🤗', 'users': ['adamelliotfields', 'ZeroXClem', 'KingNish', 'cfahlgren1', 'John6666', 'Kquant03', 'WE52', 'jeffboudier', 'not-lain', 'AtAndDev', 'alielfilali01', 'louisbrulenaudet', 'Nymbo'], 'count': 13}, {'reaction': '🔥', 'users': ['ZeroXClem', 'cfahlgren1', 'John6666', 'Kquant03', 'WE52', 'jeffboudier', 'not-lain', 'AtAndDev', 'alielfilali01'], 'count': 9}, {'reaction': '❤️', 'users': ['John6666', 'WE52', 'jeffboudier', 'not-lain', 'AtAndDev', 'alielfilali01'], 'count': 6}, {'reaction': '🚀', 'users': ['John6666', 'WE52', 'jeffboudier', 'not-lain', 'AtAndDev', 'alielfilali01'], 'count': 6}, {'reaction': '➕', 'users': ['John6666', 'WE52', 'not-lain', 'AtAndDev', 'alielfilali01'], 'count': 5}, {'reaction': '👍', 'users': ['John6666', 'WE52', 'not-lain', 'AtAndDev'], 'count': 4}, {'reaction': '🤝', 'users': ['WE52', 'not-lain', 'AtAndDev'], 'count': 3}]",2024-10-10 16:43:08,2024-10-15 17:45:14.764,"[{'_id': '665de0a504fd28c09cf3a6d8', 'avatarUrl': '/avatars/5e9554ba5afb386c170ff66ab9c8c363.svg', 'fullname': 'Andrea Altomani', 'name': 'andreaaltomani', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}]",/posts/victor/809715889668727,2694,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,158022746603972,"[{'type': 'text', 'value': 'Rhymes AI drops Aria: small Multimodal MoE that beats GPT-4o and Gemini-1.5-Flash ⚡️', 'raw': 'Rhymes AI drops Aria: small Multimodal MoE that beats GPT-4o and Gemini-1.5-Flash ⚡️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""New player entered the game! Rhymes AI has just been announced, and unveiled Aria – a multimodal powerhouse that's punching above its weight."", 'raw': ""New player entered the game! 
Rhymes AI has just been announced, and unveiled Aria – a multimodal powerhouse that's punching above its weight.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key insights:', 'raw': 'Key insights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Mixture-of-Experts architecture: 25.3B total params, but only 3.9B active.', 'raw': '🧠 Mixture-of-Experts architecture: 25.3B total params, but only 3.9B active.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌈 Multimodal: text/image/video → text.', 'raw': '🌈 Multimodal: text/image/video → text.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 Novel training approach: “multimodal-native” where multimodal training starts directly during pre-training, not just tacked on later', 'raw': '📚 Novel training approach: “multimodal-native” where multimodal training starts directly during pre-training, not just tacked on later'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📏 Long 64K token context window', 'raw': '📏 Long 64K token context window'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔓 Apache 2.0 license, with weights, code, and demos all open', 'raw': '🔓 Apache 2.0 license, with weights, code, and demos all open'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡️ On the benchmark side, Aria leaves some big names in the dust.', 'raw': '⚡️ On the benchmark side, Aria leaves some big names in the dust.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- It beats Pixtral 12B or Llama-3.2-12B on several vision benchmarks like MMMU or MathVista.', 'raw': '- It beats Pixtral 12B or Llama-3.2-12B on several vision benchmarks like MMMU or MathVista.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- It even overcomes the much bigger GPT-4o on long video tasks and even outshines Gemini 1.5 Flash when it comes to parsing lengthy documents.', 'raw': '- It even overcomes the much bigger GPT-4o on long video tasks and even outshines Gemini 1.5 Flash when it comes to parsing lengthy documents.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""But Rhymes AI isn't just showing off benchmarks. They've already got Aria powering a real-world augmented search app called “Beago”. It’s handling even recent events with great accuracy!"", 'raw': ""But Rhymes AI isn't just showing off benchmarks. They've already got Aria powering a real-world augmented search app called “Beago”. 
It’s handling even recent events with great accuracy!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And they partnered with AMD to make it much faster than competitors like Perplexity or Gemini search.', 'raw': 'And they partnered with AMD to make it much faster than competitors like Perplexity or Gemini search.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read their paper for Aria 👉\xa0', 'raw': 'Read their paper for Aria 👉\xa0'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2410.05993'}, 'url': 'https://huggingface.co/papers/2410.05993', 'raw': 'https://huggingface.co/papers/2410.05993', 'label': 'Aria: An Open Multimodal Native Mixture-of-Experts Model (2410.05993)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try BeaGo 🐶 👉\xa0', 'raw': 'Try BeaGo 🐶 👉\xa0'}, {'type': 'link', 'href': 'https://rhymes.ai/blog-details/introducing-beago-your-smarter-faster-ai-search', 'raw': 'https://rhymes.ai/blog-details/introducing-beago-your-smarter-faster-ai-search'}]","Rhymes AI drops Aria: small Multimodal MoE that beats GPT-4o and Gemini-1.5-Flash ⚡️ + +New player entered the game! Rhymes AI has just been announced, and unveiled Aria – a multimodal powerhouse that's punching above its weight. + +Key insights: + +🧠 Mixture-of-Experts architecture: 25.3B total params, but only 3.9B active. + +🌈 Multimodal: text/image/video → text. + +📚 Novel training approach: “multimodal-native” where multimodal training starts directly during pre-training, not just tacked on later + +📏 Long 64K token context window + +🔓 Apache 2.0 license, with weights, code, and demos all open + +⚡️ On the benchmark side, Aria leaves some big names in the dust. + +- It beats Pixtral 12B or Llama-3.2-12B on several vision benchmarks like MMMU or MathVista. +- It even overcomes the much bigger GPT-4o on long video tasks and even outshines Gemini 1.5 Flash when it comes to parsing lengthy documents. + +But Rhymes AI isn't just showing off benchmarks. They've already got Aria powering a real-world augmented search app called “Beago”. It’s handling even recent events with great accuracy! + +And they partnered with AMD to make it much faster than competitors like Perplexity or Gemini search. 
+ +Read their paper for Aria 👉 https://huggingface.co/papers/2410.05993 + +Try BeaGo 🐶 👉 https://rhymes.ai/blog-details/introducing-beago-your-smarter-faster-ai-search","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/1RiEQ_wA4b_YI4HweDGiQ.png'}]",[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'dlflannery', 'jordivcb', 'reach-vb', 'iojvsuynv', 'KingNish', 'victor', 'dishank002', 'IMFDEtienne', 'nina-summer', 'teowu'], 'count': 11}, {'reaction': '👀', 'users': ['John6666', 'DataSoul', 'reach-vb', 'C0casio45', 'iojvsuynv', 'victor', 'v000000', 'louisbrulenaudet'], 'count': 8}, {'reaction': '🤝', 'users': ['nina-summer'], 'count': 1}]",2024-10-10 09:51:31,2024-10-10 19:26:36.766,"[{'_id': '66f5198cb4ee0bf6882d8062', 'avatarUrl': '/avatars/4b3f3b8fe4ab980cbfaab52afe52dfc9.svg', 'fullname': 'Aleks', 'name': 'aleksfinn23', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/m-ric/158022746603972,2954,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1655385361868-61b85ce86eb1f2c5e6233736.jpeg,930.0,Vaibhav Srivastav,reach-vb,917454968346786,"[{'type': 'text', 'value': 'NEW: Open Source Text/ Image to video model is out - MIT licensed - Rivals Gen-3, Pika & Kling 🔥', 'raw': 'NEW: Open Source Text/ Image to video model is out - MIT licensed - Rivals Gen-3, Pika & Kling 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Pyramid Flow: Training-efficient Autoregressive Video Generation method', 'raw': '> Pyramid Flow: Training-efficient Autoregressive Video Generation method'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Utilizes Flow Matching', 'raw': '> Utilizes Flow Matching'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Trains on open-source datasets', 'raw': '> Trains on open-source datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Generates high-quality 10-second videos', 'raw': '> Generates high-quality 10-second videos'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Video resolution: 768p', 'raw': '> Video resolution: 768p'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Frame rate: 24 FPS', 'raw': '> Frame rate: 24 FPS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Supports image-to-video generation', 'raw': '> Supports image-to-video generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Model checkpoints available on the hub 🤗: ', 'raw': '> Model checkpoints available on the hub 🤗: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'rain1011/pyramid-flow-sd3'}, 'url': 'https://huggingface.co/rain1011/pyramid-flow-sd3', 'raw': 'https://huggingface.co/rain1011/pyramid-flow-sd3'}]","NEW: Open Source Text/ Image to video model is out - MIT licensed - Rivals Gen-3, Pika & Kling 🔥 + +> Pyramid Flow: Training-efficient Autoregressive Video Generation method +> Utilizes Flow Matching +> Trains on open-source datasets +> Generates high-quality 10-second videos +> Video resolution: 768p +> Frame rate: 24 FPS +> Supports image-to-video generation + +> Model checkpoints available on the hub 🤗: https://huggingface.co/rain1011/pyramid-flow-sd3","[{'type': 'video', 'url': 
'https://cdn-uploads.huggingface.co/production/uploads/61b85ce86eb1f2c5e6233736/7fTuWxBKAlkaPIDJgtPJ5.mp4'}]",[],"[{'reaction': '👍', 'users': ['lab212', 'YaTharThShaRma999', 'roger-temp', 'natalie5', 'victor', 'tonynoce', 'MAsad789565', 'zhang123123', 'den0620', 'WaveCut', 'satheeshkola532'], 'count': 11}, {'reaction': '🔥', 'users': ['YaTharThShaRma999', 'KingNish', 'adamelliotfields', 'victor', 'jaigurudev', 'RalphX1', 'tolgacangoz'], 'count': 7}, {'reaction': '👀', 'users': ['John6666', 'YaTharThShaRma999', 'jaigurudev'], 'count': 3}]",2024-10-10 09:02:01,2024-10-10 09:02:01.744,[],/posts/reach-vb/917454968346786,3417,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,653106772597209,"[{'type': 'text', 'value': '🎓 Introducing Doc4web.ru Documents Dataset - ', 'raw': '🎓 Introducing Doc4web.ru Documents Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/doc4web'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/doc4web', 'raw': 'https://huggingface.co/datasets/nyuuzyou/doc4web'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset highlights:', 'raw': 'Dataset highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 223,739 documents from doc4web.ru, a document hosting platform for students and teachers', 'raw': '- 223,739 documents from doc4web.ru, a document hosting platform for students and teachers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Primarily in Russian, with some English and potentially other languages', 'raw': '- Primarily in Russian, with some English and potentially other languages'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Each entry includes: URL, title, download link, file path, and content (where available)', 'raw': '- Each entry includes: URL, title, download link, file path, and content (where available)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Contains original document files in addition to metadata', 'raw': '- Contains original document files in addition to metadata'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Data reflects a wide range of educational topics and materials', 'raw': '- Data reflects a wide range of educational topics and materials'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Licensed under Creative Commons Zero (CC0) for unrestricted use', 'raw': '- Licensed under Creative Commons Zero (CC0) for unrestricted use'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The dataset can be used for analyzing educational content in Russian, text classification tasks, and information retrieval systems. It's also valuable for examining trends in educational materials and document sharing practices in the Russian-speaking academic community. The inclusion of original files allows for in-depth analysis of various document formats and structures."", 'raw': ""The dataset can be used for analyzing educational content in Russian, text classification tasks, and information retrieval systems. It's also valuable for examining trends in educational materials and document sharing practices in the Russian-speaking academic community. 
The inclusion of original files allows for in-depth analysis of various document formats and structures.""}]","🎓 Introducing Doc4web.ru Documents Dataset - https://huggingface.co/datasets/nyuuzyou/doc4web + +Dataset highlights: +- 223,739 documents from doc4web.ru, a document hosting platform for students and teachers +- Primarily in Russian, with some English and potentially other languages +- Each entry includes: URL, title, download link, file path, and content (where available) +- Contains original document files in addition to metadata +- Data reflects a wide range of educational topics and materials +- Licensed under Creative Commons Zero (CC0) for unrestricted use + +The dataset can be used for analyzing educational content in Russian, text classification tasks, and information retrieval systems. It's also valuable for examining trends in educational materials and document sharing practices in the Russian-speaking academic community. The inclusion of original files allows for in-depth analysis of various document formats and structures.",[],[],"[{'reaction': '👀', 'users': ['osanseviero', 'John6666', 'victor', 'DmitryRyumin', 'WaveCut', 'DeathGodlike', 'den0620', 'louisbrulenaudet'], 'count': 8}, {'reaction': '❤️', 'users': ['d0rj', 'aleksfinn23', 'IlyaGusev', 'DeathGodlike', 'den0620'], 'count': 5}]",2024-10-10 07:58:42,2024-10-10 07:58:42.561,[],/posts/nyuuzyou/653106772597209,1997,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,945593396015382,"[{'type': 'text', 'value': '🇨🇳⛵️ 出海: Chinese AI is expanding globally', 'raw': '🇨🇳⛵️ 出海: Chinese AI is expanding globally'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fact: Chinese LLMs are heavily underrated, for instance recently the excellent Deepseek-v2.5 or Qwen models. ', 'raw': 'Fact: Chinese LLMs are heavily underrated, for instance recently the excellent Deepseek-v2.5 or Qwen models. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Luckily for us, ', 'raw': 'Luckily for us, '}, {'type': 'mention', 'user': 'AdinaY', 'raw': '@AdinaY'}, {'type': 'text', 'value': ' just wrote an excellent blog post explaining the Chinese AI ecosystem!', 'raw': ' just wrote an excellent blog post explaining the Chinese AI ecosystem!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My key takeaways:', 'raw': 'My key takeaways:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Since Google, OpenAI and Anthropic models are not available in China, local companies are fighting for the market. A really good market - AI has much higher penetration there than in the rest of the world, both with companies and individual users!', 'raw': 'Since Google, OpenAI and Anthropic models are not available in China, local companies are fighting for the market. 
A really good market - AI has much higher penetration there than in the rest of the world, both with companies and individual users!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💰 But since Deepseek heavily cut prices in May 24, this spiraled into a price war that created a cut-throat environment with unsustainably low prices.', 'raw': '💰 But since Deepseek heavily cut prices in May 24, this spiraled into a price war that created a cut-throat environment with unsustainably low prices.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📋 On top of this, the local regulation is stringent: models must undergo licensing from a local censor (the Cyberspace Administration of China), that for instance requires models to refuse answering certain questions on the CCP. Although this is certainly simpler to implement than certain conditions of the European AI Act.', 'raw': '📋 On top of this, the local regulation is stringent: models must undergo licensing from a local censor (the Cyberspace Administration of China), that for instance requires models to refuse answering certain questions on the CCP. Although this is certainly simpler to implement than certain conditions of the European AI Act.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""💸 If this wasn't enough, VC investment in AI is drying out: By mid-2024, Chinese AI startups raised approximately $4.4 billion, vs $55B for US startups just in Q2 24."", 'raw': ""💸 If this wasn't enough, VC investment in AI is drying out: By mid-2024, Chinese AI startups raised approximately $4.4 billion, vs $55B for US startups just in Q2 24.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📱 To reach profitability, companies have shifted from foundational models to model + application, for instance PopAI from [01.AI](', 'raw': '📱 To reach profitability, companies have shifted from foundational models to model + application, for instance PopAI from [01.AI]('}, {'type': 'link', 'href': 'http://01.ai/', 'raw': 'http://01.ai/'}, {'type': 'text', 'value': ') with millions of users and high profitability.', 'raw': ') with millions of users and high profitability.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⛏️ They also try to drill down into specific industries: but these niches are also getting crowded.', 'raw': '⛏️ They also try to drill down into specific industries: but these niches are also getting crowded.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Since their home market is becoming both too crowded and inhospitable, Chinese companies are now going for international markets, ""Sailing abroad"" following the expression consecrated by Zheng He\'s legendary journey in 1500.', 'raw': '➡️ Since their home market is becoming both too crowded and inhospitable, Chinese companies are now going for international markets, ""Sailing abroad"" following the expression consecrated by Zheng He\'s legendary journey in 1500.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""There, they'll have to adapt to different infrastructures and regulations, but they have bright prospects for growth!"", 'raw': ""There, they'll have to adapt to different infrastructures and regulations, but they have bright prospects for growth!""}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read her post 👉\xa0', 'raw': 'Read her post 👉\xa0'}, {'type': 'link', 'href': 'https://huggingface.co/blog/AdinaY/chinese-ai-global-expansion', 'raw': 'https://huggingface.co/blog/AdinaY/chinese-ai-global-expansion'}]","🇨🇳⛵️ 出海: Chinese AI is expanding globally + +Fact: Chinese LLMs are heavily underrated, for instance recently the excellent Deepseek-v2.5 or Qwen models. + +Luckily for us, @AdinaY just wrote an excellent blog post explaining the Chinese AI ecosystem! + +My key takeaways: + +Since Google, OpenAI and Anthropic models are not available in China, local companies are fighting for the market. A really good market - AI has much higher penetration there than in the rest of the world, both with companies and individual users! + +💰 But since Deepseek heavily cut prices in May 24, this spiraled into a price war that created a cut-throat environment with unsustainably low prices. + +📋 On top of this, the local regulation is stringent: models must undergo licensing from a local censor (the Cyberspace Administration of China), that for instance requires models to refuse answering certain questions on the CCP. Although this is certainly simpler to implement than certain conditions of the European AI Act. + +💸 If this wasn't enough, VC investment in AI is drying out: By mid-2024, Chinese AI startups raised approximately $4.4 billion, vs $55B for US startups just in Q2 24. + +📱 To reach profitability, companies have shifted from foundational models to model + application, for instance PopAI from [01.AI](http://01.ai/) with millions of users and high profitability. + +⛏️ They also try to drill down into specific industries: but these niches are also getting crowded. + +➡️ Since their home market is becoming both too crowded and inhospitable, Chinese companies are now going for international markets, ""Sailing abroad"" following the expression consecrated by Zheng He's legendary journey in 1500. + +There, they'll have to adapt to different infrastructures and regulations, but they have bright prospects for growth! 
+ +Read her post 👉 https://huggingface.co/blog/AdinaY/chinese-ai-global-expansion","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/o9qYeyLtZ1W8IhcOuB0x6.png'}]","[{'_id': '63a369d98c0c89dcae3b8329', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63a369d98c0c89dcae3b8329/AiH2zjy1cnt9OADAAZMLD.jpeg', 'fullname': 'Adina Yakefu', 'name': 'AdinaY', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 774}]","[{'reaction': '🔥', 'users': ['AdinaY', 'DeathGodlike', 'ethancl1'], 'count': 3}, {'reaction': '🤗', 'users': ['AdinaY', 'John6666'], 'count': 2}, {'reaction': '🚀', 'users': ['ethancl1', 'louisbrulenaudet'], 'count': 2}]",2024-10-03 13:40:07,2024-10-03 13:53:46.499,[],/posts/m-ric/945593396015382,1336,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,597151798835464,"[{'type': 'text', 'value': 'ColPali is an exciting new approach to multimodal document retrieval, but some doubt its practical use with existing vector DBs.', 'raw': 'ColPali is an exciting new approach to multimodal document retrieval, but some doubt its practical use with existing vector DBs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It turns out it's super easy to use Qdrant to index and search ColPali embeddings efficiently."", 'raw': ""It turns out it's super easy to use Qdrant to index and search ColPali embeddings efficiently.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog post here: ', 'raw': 'Blog post here: '}, {'type': 'link', 'href': 'https://danielvanstrien.xyz/posts/post-with-code/colpali-qdrant/2024-10-02_using_colpali_with_qdrant.html', 'raw': 'https://danielvanstrien.xyz/posts/post-with-code/colpali-qdrant/2024-10-02_using_colpali_with_qdrant.html'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Very silly demo: ', 'raw': 'Very silly demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'davanstrien/ufo-ColPali-Search'}, 'url': 'https://huggingface.co/spaces/davanstrien/ufo-ColPali-Search', 'raw': 'https://huggingface.co/spaces/davanstrien/ufo-ColPali-Search'}]","ColPali is an exciting new approach to multimodal document retrieval, but some doubt its practical use with existing vector DBs. + +It turns out it's super easy to use Qdrant to index and search ColPali embeddings efficiently. 
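To make the Qdrant claim concrete, here is a minimal sketch of indexing ColPali-style multivector embeddings. It assumes a recent qdrant-client (>= 1.10, which added multivector support); the collection name and the toy patch vectors are placeholders, not values from the blog post.

```python
# Minimal sketch: indexing ColPali-style multivector embeddings in Qdrant.
# Assumes qdrant-client >= 1.10 (multivector support); names and vectors are illustrative.
from qdrant_client import QdrantClient, models

client = QdrantClient(":memory:")  # in-memory instance, for demonstration only

# ColPali emits one 128-dim vector per image patch; MAX_SIM scores a query
# against a document by late interaction over these patch vectors.
client.create_collection(
    collection_name="colpali_demo",
    vectors_config=models.VectorParams(
        size=128,
        distance=models.Distance.COSINE,
        multivector_config=models.MultiVectorConfig(
            comparator=models.MultiVectorComparator.MAX_SIM
        ),
    ),
)

# Each point stores a list of per-patch vectors (here: 4 toy patches).
doc_embedding = [[0.1] * 128, [0.2] * 128, [0.3] * 128, [0.4] * 128]
client.upsert(
    collection_name="colpali_demo",
    points=[models.PointStruct(id=1, vector=doc_embedding, payload={"page": 1})],
)

# Query with the multivector produced for the text query (toy values here).
query_embedding = [[0.15] * 128, [0.25] * 128]
hits = client.query_points(collection_name="colpali_demo", query=query_embedding, limit=5)
print(hits)
```

The key design choice is the MAX_SIM comparator, which performs ColPali's late-interaction scoring inside the database instead of in application code.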
+ +Blog post here: https://danielvanstrien.xyz/posts/post-with-code/colpali-qdrant/2024-10-02_using_colpali_with_qdrant.html + +Very silly demo: https://huggingface.co/spaces/davanstrien/ufo-ColPali-Search",[],[],"[{'reaction': '👀', 'users': ['John6666', 'YaTharThShaRma999'], 'count': 2}, {'reaction': '🔥', 'users': ['YaTharThShaRma999'], 'count': 1}]",2024-10-03 12:42:54,2024-10-03 12:42:54.694,[],/posts/davanstrien/597151798835464,1285,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620630b603825909dcbeba35/vNlOtJqdcP3vpAfeHfNvP.jpeg,246.0,Aaron C Wacker,awacke1,871893662265982,"[{'type': 'text', 'value': 'Updated my 📺RTV🖼️ - Real Time Video AI app this morning.', 'raw': 'Updated my 📺RTV🖼️ - Real Time Video AI app this morning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'URL: ', 'raw': 'URL: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/awacke1/stable-video-diffusion', 'raw': 'https://huggingface.co/spaces/awacke1/stable-video-diffusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It uses Stable Diffusion to dynamically create videos from images in input directory or uploaded using A10 GPU on Huggingface.', 'raw': 'It uses Stable Diffusion to dynamically create videos from images in input directory or uploaded using A10 GPU on Huggingface.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Samples below.', 'raw': 'Samples below.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I may transition this to Zero GPU if I can. During Christmas when I revised this I had my highest billing from HF yet due to GPU usage. It is still the best turn key GPU out and Image2Video is a killer app. Thanks HF for the possibilities!', 'raw': 'I may transition this to Zero GPU if I can. During Christmas when I revised this I had my highest billing from HF yet due to GPU usage. It is still the best turn key GPU out and Image2Video is a killer app. Thanks HF for the possibilities!'}]","Updated my 📺RTV🖼️ - Real Time Video AI app this morning. +URL: https://huggingface.co/spaces/awacke1/stable-video-diffusion + +It uses Stable Diffusion to dynamically create videos from images in input directory or uploaded using A10 GPU on Huggingface. + + +Samples below. + +I may transition this to Zero GPU if I can. During Christmas when I revised this I had my highest billing from HF yet due to GPU usage. It is still the best turn key GPU out and Image2Video is a killer app. 
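For readers who want to reproduce the image-to-video step outside the Space, a minimal diffusers sketch follows. It assumes the stabilityai/stable-video-diffusion-img2vid-xt checkpoint and a CUDA GPU; the input image path is a placeholder.

```python
# Minimal image-to-video sketch with diffusers' Stable Video Diffusion pipeline.
# Assumes a CUDA GPU; the input image path is a placeholder.
import torch
from diffusers import StableVideoDiffusionPipeline
from diffusers.utils import load_image, export_to_video

pipe = StableVideoDiffusionPipeline.from_pretrained(
    "stabilityai/stable-video-diffusion-img2vid-xt", torch_dtype=torch.float16
).to("cuda")

image = load_image("input.png").resize((1024, 576))  # SVD's expected resolution
frames = pipe(image, decode_chunk_size=8).frames[0]  # list of PIL frames
export_to_video(frames, "output.mp4", fps=7)
```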
Thanks HF for the possibilities!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/itFP4Kk6PmemqscBb7qQO.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/lkycUPGX6xUYDrOojwC2A.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/pNzXsRlYD3-y8Zm3PFT1G.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/6l0583-nCW-1fx-zfEb6v.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/q-wmM9Z3E9rMsIRD89yLe.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/FiJ5b9Wib0SqC9-FQJuNX.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/RTWy0RqToaIPF9eEs3z4j.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/WM7QMh5UZXMrWVeUsSR0D.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/AjB3Rsx9oixLFGOfjzgZs.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/opTIrT7UYVcF5_FZnkrns.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/zF6GRwT1PX5E5CmAih8tX.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/A6hXBuxizd0I4fHnxiDW3.mp4'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '🔥', 'users': ['KingNish'], 'count': 1}]",2024-10-03 12:41:20,2024-10-03 12:41:20.512,[],/posts/awacke1/871893662265982,1021,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,585613287443442,"[{'type': 'text', 'value': 'Triton-accelerated nanoGPT🤕', 'raw': 'Triton-accelerated nanoGPT🤕'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The WHY behind this ordeal - After practicing triton for about 2 weeks now, I challenged myself into implementing custom triton kernels for Karpathy's nanoGPT and quite an ordeal it was but somehow got something working, not perfect but getting there:), contributions are welcomed."", 'raw': ""The WHY behind this ordeal - After practicing triton for about 2 weeks now, I challenged myself into implementing custom triton kernels for Karpathy's nanoGPT and quite an ordeal it was but somehow got something working, not perfect but getting there:), contributions are welcomed.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'link', 'href': 'https://github.com/Jaykef/Triton-nanoGPT', 'raw': 'https://github.com/Jaykef/Triton-nanoGPT'}]","Triton-accelerated nanoGPT🤕 +The WHY behind this ordeal - After practicing triton for about 2 weeks now, I challenged myself into implementing custom triton kernels for Karpathy's nanoGPT and quite an ordeal it was but somehow got something working, not perfect but getting there:), contributions are welcomed. 
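The post above links to the repo rather than inlining code, so here is a generic, minimal Triton kernel (not taken from the linked repo) showing the program-id/block pattern that custom GPT kernels extend:

```python
# Generic minimal Triton kernel (illustrative; not from the linked repo).
# Requires a CUDA device.
import torch
import triton
import triton.language as tl

@triton.jit
def add_kernel(x_ptr, y_ptr, out_ptr, n_elements, BLOCK_SIZE: tl.constexpr):
    pid = tl.program_id(axis=0)                       # one program per block
    offsets = pid * BLOCK_SIZE + tl.arange(0, BLOCK_SIZE)
    mask = offsets < n_elements                       # guard the ragged tail
    x = tl.load(x_ptr + offsets, mask=mask)
    y = tl.load(y_ptr + offsets, mask=mask)
    tl.store(out_ptr + offsets, x + y, mask=mask)

def add(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    out = torch.empty_like(x)
    n = x.numel()
    grid = (triton.cdiv(n, 1024),)                    # 1D launch grid
    add_kernel[grid](x, y, out, n, BLOCK_SIZE=1024)
    return out

x = torch.randn(4096, device="cuda")
y = torch.randn(4096, device="cuda")
assert torch.allclose(add(x, y), x + y)
```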
+ +Code: https://github.com/Jaykef/Triton-nanoGPT","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/exKfTtOZLMDTDmZNNLRMW.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/5Ng54XxDEAj-6EfXtz9Xh.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-03 12:02:16,2024-10-04 04:20:23.535,[],/posts/Jaward/585613287443442,376,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1613511937628-5fb15d1e84389b139cf3b508.jpeg,346.0,Moritz Laurer,MoritzLaurer,935333471653456,"[{'type': 'text', 'value': '#phdone - I defended my PhD yesterday! A key lesson: it is amazing how open science and open source can empower beginners with limited resources:', 'raw': '#phdone - I defended my PhD yesterday! A key lesson: it is amazing how open science and open source can empower beginners with limited resources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I first learned about instruction-based classifiers like BERT-NLI 3-4 years ago, through the ', 'raw': 'I first learned about instruction-based classifiers like BERT-NLI 3-4 years ago, through the '}, {'type': 'mention', 'user': 'HuggingFace', 'raw': '@HuggingFace'}, {'type': 'text', 'value': "" ZeroShotClassificationPipeline. Digging deeper into this, it was surprisingly easy to find new datasets, newer base models, and reusable fine-tuning scripts on the HF Hub to create my own zeroshot models - although I didn't know much about fine-tuning at the time."", 'raw': "" ZeroShotClassificationPipeline. Digging deeper into this, it was surprisingly easy to find new datasets, newer base models, and reusable fine-tuning scripts on the HF Hub to create my own zeroshot models - although I didn't know much about fine-tuning at the time.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks to the community effect of the Hub, my models were downloaded hundreds of thousands of times after a few months. Seeing my research being useful for people motivated me to improve and upload newer models. Leaving my contact details in the model cards led to academic cooperation and consulting contracts (and eventually my job at HF).', 'raw': 'Thanks to the community effect of the Hub, my models were downloaded hundreds of thousands of times after a few months. Seeing my research being useful for people motivated me to improve and upload newer models. Leaving my contact details in the model cards led to academic cooperation and consulting contracts (and eventually my job at HF).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""That's the power of open science & open source: learning, sharing, improving, collaborating."", 'raw': ""That's the power of open science & open source: learning, sharing, improving, collaborating.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I mean every word in my thesis acknowledgments (screenshot). I'm very grateful to my supervisors "", 'raw': ""I mean every word in my thesis acknowledgments (screenshot). 
I'm very grateful to my supervisors ""}, {'type': 'mention', 'user': 'vanatteveldt', 'raw': '@vanatteveldt'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'CasAndreu', 'raw': '@CasAndreu'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'KasperWelbers', 'raw': '@KasperWelbers'}, {'type': 'text', 'value': ' for their guidance; to ', 'raw': ' for their guidance; to '}, {'type': 'mention', 'user': 'profAndreaRenda', 'raw': '@profAndreaRenda'}, {'type': 'text', 'value': ' and @CEPS_thinktank for enabling me to work part-time during the first year; to ', 'raw': ' and @CEPS_thinktank for enabling me to work part-time during the first year; to '}, {'type': 'mention', 'user': 'huggingface', 'raw': '@huggingface'}, {'type': 'text', 'value': ' for creating awesome tools and an awesome platform; and to many others who are not active on social media.', 'raw': ' for creating awesome tools and an awesome platform; and to many others who are not active on social media.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Links to the full thesis and the collection of my most recent models are below.', 'raw': 'Links to the full thesis and the collection of my most recent models are below.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PS: If someone happens to speak Latin, let me know if my diploma contains some hidden Illuminati code or something :D', 'raw': 'PS: If someone happens to speak Latin, let me know if my diploma contains some hidden Illuminati code or something :D'}]","#phdone - I defended my PhD yesterday! A key lesson: it is amazing how open science and open source can empower beginners with limited resources: + +I first learned about instruction-based classifiers like BERT-NLI 3-4 years ago, through the @HuggingFace ZeroShotClassificationPipeline. Digging deeper into this, it was surprisingly easy to find new datasets, newer base models, and reusable fine-tuning scripts on the HF Hub to create my own zeroshot models - although I didn't know much about fine-tuning at the time. + +Thanks to the community effect of the Hub, my models were downloaded hundreds of thousands of times after a few months. Seeing my research being useful for people motivated me to improve and upload newer models. Leaving my contact details in the model cards led to academic cooperation and consulting contracts (and eventually my job at HF). + +That's the power of open science & open source: learning, sharing, improving, collaborating. + +I mean every word in my thesis acknowledgments (screenshot). I'm very grateful to my supervisors @vanatteveldt @CasAndreu @KasperWelbers for their guidance; to @profAndreaRenda and @CEPS_thinktank for enabling me to work part-time during the first year; to @huggingface for creating awesome tools and an awesome platform; and to many others who are not active on social media. + +Links to the full thesis and the collection of my most recent models are below. 
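For context on the pipeline mentioned above, this is all it takes to run an NLI-based zero-shot classifier from the Hub; facebook/bart-large-mnli is used here as the canonical example, and any NLI-style zeroshot checkpoint can be swapped in:

```python
# NLI-based zero-shot classification via the transformers pipeline.
# facebook/bart-large-mnli is the canonical example model; any NLI-style
# zeroshot checkpoint from the Hub can be substituted.
from transformers import pipeline

classifier = pipeline("zero-shot-classification", model="facebook/bart-large-mnli")
result = classifier(
    "The new GPU delivers twice the throughput at half the power.",
    candidate_labels=["hardware", "politics", "sports"],
)
print(result["labels"][0], result["scores"][0])  # highest-scoring label first
```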
+ +PS: If someone happens to speak Latin, let me know if my diploma contains some hidden Illuminati code or something :D","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fb15d1e84389b139cf3b508/QxHbp9tK96btcWfIR5tTz.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fb15d1e84389b139cf3b508/kav8Ze67UzYp59K9RHvYt.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fb15d1e84389b139cf3b508/QwbekRgOEp2uDkI0e6kQT.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fb15d1e84389b139cf3b508/tgQyaiKK_VDF6hM6D8w-6.jpeg'}]","[{'_id': '655b740b3f3e05685d777ef3', 'avatarUrl': '/avatars/d58b30fea905cfd7ac15e0ab30a43ac4.svg', 'fullname': 'Kasper Welbers', 'name': 'KasperWelbers', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}, {'_id': '63fc9e7df67cec1f9f57b50a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63fc9e7df67cec1f9f57b50a/dX-SDTo0mUyjnAZxys7I1.png', 'fullname': 'Wouter', 'name': 'vanatteveldt', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '❤️', 'users': ['tomaarsen', 'clboetticher-hf', 'm-ric', 'celinah', 'bwilkinson', 'Near32', 'layperson99', 'clem', 'adamelliotfields', 'WaveCut', 'eepol', 'Exquisiteuser', 'Stopwolf', 'Oshan', 'agentlans', 'DavidGF', 'mlabonne', 'Aurelien-Morgan', 'ppsingh', 'Carlos3D', 'CptnPrice', 'madoss', 'aarabil', 'dalbrecht-xom', 'muhtasham'], 'count': 25}, {'reaction': '🤗', 'users': ['m-ric', 'fffiloni', 'a9i', 'Aurelien-Morgan', 'madoss'], 'count': 5}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '🚀', 'users': ['fffiloni'], 'count': 1}]",2024-10-03 11:08:12,2024-12-02 08:32:16.396,"[{'_id': '5fb15d1e84389b139cf3b508', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1613511937628-5fb15d1e84389b139cf3b508.jpeg', 'fullname': 'Moritz Laurer', 'name': 'MoritzLaurer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 346, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}, {'_id': '65aaa5355860f06ff2a67c37', 'avatarUrl': '/avatars/9a825fdb025f6ebeb965017c95727b5c.svg', 'fullname': 'Antón Fernández Pérez', 'name': 'CptnPrice', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/MoritzLaurer/935333471653456,4747,,4 +/avatars/355d16e28ca9cf5891368e43bcda6de5.svg,19.0,Marco Pimentel,mpimentel,836944689044968,"[{'type': 'text', 'value': 'Just dropped a new blog post 🤗 ', 'raw': 'Just dropped a new blog post 🤗 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '""Comparing Open-source and Proprietary LLMs in Medical AI""', 'raw': '""Comparing Open-source and Proprietary LLMs in Medical AI""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We put the biggest AI brains 🧠 to the test on medical tasks 🏥 using popular benchmarks such as MedQA. ', 'raw': 'We put the biggest AI brains 🧠 to the test on medical tasks 🏥 using popular benchmarks such as MedQA. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Closed-source vs. open-source, costs decoded, and performance revealed! We need even more comprehensive benchmarks and evaluations of LLMs in medical tasks 🔬 ', 'raw': 'Closed-source vs. open-source, costs decoded, and performance revealed! We need even more comprehensive benchmarks and evaluations of LLMs in medical tasks 🔬 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read more at: ', 'raw': 'Read more at: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/mpimentel/comparing-llms-medical-ai', 'raw': 'https://huggingface.co/blog/mpimentel/comparing-llms-medical-ai'}]","Just dropped a new blog post 🤗 + +""Comparing Open-source and Proprietary LLMs in Medical AI"" + +We put the biggest AI brains 🧠 to the test on medical tasks 🏥 using popular benchmarks such as MedQA. + +Closed-source vs. open-source, costs decoded, and performance revealed! We need even more comprehensive benchmarks and evaluations of LLMs in medical tasks 🔬 + +Read more at: https://huggingface.co/blog/mpimentel/comparing-llms-medical-ai","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6454faafa13edf669cd74f36/kdNYj2xwkirdso1BaRG1N.png'}]",[],"[{'reaction': '🔥', 'users': ['pkanithi', 'ronnierajan', 'cchristophe', 'wadood', 'John6666', 'clem', 'Pomni', 'Jeszebel'], 'count': 8}]",2024-10-03 10:45:59,2024-10-03 10:56:23.070,[],/posts/mpimentel/836944689044968,1438,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/645b0c3ec35da9c7afd95421/vYBrCDagHsXAo6J2p-uG0.jpeg,23.0,Yuling,YerbaPage,892273134456397,"[{'type': 'text', 'value': 'We propose MGDebugger, a hierarchical bottom-up LLM code debugger 🔥 that can fix bugs from low-level syntax errors to high-level algorithmic flaws.', 'raw': 'We propose MGDebugger, a hierarchical bottom-up LLM code debugger 🔥 that can fix bugs from low-level syntax errors to high-level algorithmic flaws.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It achieves an ⭐️ 18.9% improvement in accuracy over seed generations in HumanEval and a ⭐️ 97.6% repair success rate in HumanEvalFix.', 'raw': 'It achieves an ⭐️ 18.9% improvement in accuracy over seed generations in HumanEval and a ⭐️ 97.6% repair success rate in HumanEvalFix.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper available at ', 'raw': 'Paper available at '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2410.01215', 'raw': 'https://arxiv.org/abs/2410.01215'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code and demo available at ', 'raw': 'Code and demo available at '}, {'type': 'link', 'href': 'https://github.com/YerbaPage/MGDebugger', 'raw': 'https://github.com/YerbaPage/MGDebugger'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}]","We propose MGDebugger, a hierarchical bottom-up LLM code debugger 🔥 that can fix bugs from low-level syntax errors to high-level algorithmic flaws. + +It achieves an ⭐️ 18.9% improvement in accuracy over seed generations in HumanEval and a ⭐️ 97.6% repair success rate in HumanEvalFix. + +Paper available at https://arxiv.org/abs/2410.01215. +Code and demo available at https://github.com/YerbaPage/MGDebugger. 
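To illustrate the hierarchical bottom-up idea, here is a sketch of the general strategy (not the paper's actual implementation): decompose a program into subfunctions, repair the leaves against their tests first, and only then debug the composition. The llm_fix helper is a hypothetical stand-in for a model call.

```python
# Sketch of hierarchical bottom-up debugging (illustrative only; not the
# paper's implementation). `llm_fix` is a hypothetical stand-in for an LLM call.
from dataclasses import dataclass, field

@dataclass
class Unit:
    name: str
    code: str
    tests: list                       # callables: code_str -> bool
    children: list = field(default_factory=list)

def llm_fix(code: str, failures: list) -> str:
    """Hypothetical: ask an LLM to repair `code` given its failing tests."""
    return code  # placeholder

def debug_bottom_up(unit: Unit) -> Unit:
    # Fix the leaves first, so parent failures reflect composition bugs only.
    unit.children = [debug_bottom_up(c) for c in unit.children]
    failures = [t for t in unit.tests if not t(unit.code)]
    if failures:
        unit.code = llm_fix(unit.code, failures)
    return unit
```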
+",[],[],"[{'reaction': '👀', 'users': ['YerbaPage', 'louisbrulenaudet', 'John6666', 'Joseph717171', 'dingo-actual', 'ethancl1'], 'count': 6}, {'reaction': '👍', 'users': ['Tonic', 'amakipaa', 'Goekdeniz-Guelmez', 'Joseph717171', 'ethancl1', 'danchern'], 'count': 6}, {'reaction': '🤗', 'users': ['YerbaPage', 'ajibawa-2023', 'Goekdeniz-Guelmez', 'Joseph717171'], 'count': 4}, {'reaction': '🔥', 'users': ['YerbaPage', 'Joseph717171'], 'count': 2}, {'reaction': '🧠', 'users': ['Tonic'], 'count': 1}]",2024-10-03 06:41:44,2024-10-09 07:45:55.577,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}, {'_id': '645b0c3ec35da9c7afd95421', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/645b0c3ec35da9c7afd95421/vYBrCDagHsXAo6J2p-uG0.jpeg', 'fullname': 'Yuling', 'name': 'YerbaPage', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23, 'isFollowing': False}]",/posts/YerbaPage/892273134456397,2079,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,639926000427051,"[{'type': 'text', 'value': 'Good folks at ', 'raw': 'Good folks at '}, {'type': 'mention', 'user': 'PyTorch', 'raw': '@PyTorch'}, {'type': 'text', 'value': ' have just released torchao, a game-changing library for native architecture optimization.', 'raw': ' have just released torchao, a game-changing library for native architecture optimization.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '-- How torchao Works (They threw the kitchen-sink at it...)', 'raw': '-- How torchao Works (They threw the kitchen-sink at it...)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""torchao leverages several advanced techniques to optimize PyTorch models, making them faster and more memory-efficient. Here's an overview of its key mechanisms:"", 'raw': ""torchao leverages several advanced techniques to optimize PyTorch models, making them faster and more memory-efficient. 
Here's an overview of its key mechanisms:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Quantization', 'raw': 'Quantization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'torchao employs various quantization methods to reduce model size and accelerate inference:', 'raw': 'torchao employs various quantization methods to reduce model size and accelerate inference:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Weight-only quantization: Converts model weights to lower precision formats like int4 or int8, significantly reducing memory usage.', 'raw': '• Weight-only quantization: Converts model weights to lower precision formats like int4 or int8, significantly reducing memory usage.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Dynamic activation quantization: Quantizes activations on-the-fly during inference, balancing performance and accuracy.', 'raw': '• Dynamic activation quantization: Quantizes activations on-the-fly during inference, balancing performance and accuracy.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Automatic quantization: The ', 'raw': '• Automatic quantization: The '}, {'type': 'inline_code', 'code': 'autoquant', 'raw': '`autoquant`'}, {'type': 'text', 'value': ' function intelligently selects the best quantization strategy for each layer in a model.', 'raw': ' function intelligently selects the best quantization strategy for each layer in a model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Low-bit Datatypes', 'raw': 'Low-bit Datatypes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The library utilizes low-precision datatypes to speed up computations:', 'raw': 'The library utilizes low-precision datatypes to speed up computations:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• float8: Enables float8 training for linear layers, offering substantial speedups for large models like LLaMA 3 70B.', 'raw': '• float8: Enables float8 training for linear layers, offering substantial speedups for large models like LLaMA 3 70B.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• int4 and int8: Provide options for extreme compression of weights and activations.', 'raw': '• int4 and int8: Provide options for extreme compression of weights and activations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sparsity Techniques', 'raw': 'Sparsity Techniques'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'torchao implements sparsity methods to reduce model density:', 'raw': 'torchao implements sparsity methods to reduce model density:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Semi-sparse weights: Combine quantization with sparsity for compute-bound models.', 'raw': '• Semi-sparse weights: Combine quantization with sparsity for compute-bound models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'KV Cache Optimization', 'raw': 'KV Cache Optimization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For transformer-based models, torchao offers KV cache 
quantization, leading to significant VRAM reductions for long context lengths.', 'raw': 'For transformer-based models, torchao offers KV cache quantization, leading to significant VRAM reductions for long context lengths.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Integration with PyTorch Ecosystem', 'raw': 'Integration with PyTorch Ecosystem'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'torchao seamlessly integrates with existing PyTorch tools:', 'raw': 'torchao seamlessly integrates with existing PyTorch tools:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Compatible with ', 'raw': '• Compatible with '}, {'type': 'inline_code', 'code': 'torch.compile()', 'raw': '`torch.compile()`'}, {'type': 'text', 'value': ' for additional performance gains.', 'raw': ' for additional performance gains.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Works with FSDP2 for distributed training scenarios.', 'raw': '• Works with FSDP2 for distributed training scenarios.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Supports most PyTorch models available on Hugging Face out-of-the-box.', 'raw': '• Supports most PyTorch models available on Hugging Face out-of-the-box.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'By combining these techniques, torchao enables developers to significantly improve the performance and efficiency of their PyTorch models with minimal code changes and accuracy impact.', 'raw': 'By combining these techniques, torchao enables developers to significantly improve the performance and efficiency of their PyTorch models with minimal code changes and accuracy impact.'}]","Good folks at @PyTorch have just released torchao, a game-changing library for native architecture optimization. + +-- How torchao Works (They threw the kitchen-sink at it...) + +torchao leverages several advanced techniques to optimize PyTorch models, making them faster and more memory-efficient. Here's an overview of its key mechanisms: + +Quantization + +torchao employs various quantization methods to reduce model size and accelerate inference: + +• Weight-only quantization: Converts model weights to lower precision formats like int4 or int8, significantly reducing memory usage. +• Dynamic activation quantization: Quantizes activations on-the-fly during inference, balancing performance and accuracy. +• Automatic quantization: The `autoquant` function intelligently selects the best quantization strategy for each layer in a model. + +Low-bit Datatypes + +The library utilizes low-precision datatypes to speed up computations: + +• float8: Enables float8 training for linear layers, offering substantial speedups for large models like LLaMA 3 70B. +• int4 and int8: Provide options for extreme compression of weights and activations. + +Sparsity Techniques + +torchao implements sparsity methods to reduce model density: + +• Semi-sparse weights: Combine quantization with sparsity for compute-bound models. + +KV Cache Optimization + +For transformer-based models, torchao offers KV cache quantization, leading to significant VRAM reductions for long context lengths. + +Integration with PyTorch Ecosystem + +torchao seamlessly integrates with existing PyTorch tools: + +• Compatible with `torch.compile()` for additional performance gains.
+• Works with FSDP2 for distributed training scenarios. +• Supports most PyTorch models available on Hugging Face out-of-the-box. + +By combining these techniques, torchao enables developers to significantly improve the performance and efficiency of their PyTorch models with minimal code changes and accuracy impact.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/OQHsxF1cmSPypN0w47ny2.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'acamilogg88', 'louisbrulenaudet', 'Joseph717171'], 'count': 4}, {'reaction': '👀', 'users': ['John6666', 'osanseviero', 'Joseph717171'], 'count': 3}]",2024-10-03 03:28:49,2024-10-03 21:03:24.216,"[{'_id': '662bf5bfe93bb73804ef9344', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png', 'fullname': 'Kuldeep Singh Sidhu', 'name': 'singhsidhukuldeep', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 365, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '63c1dfd4a0ffa3857eb362a9', 'avatarUrl': '/avatars/ea4398745974d781ae9dc0e95b12cabe.svg', 'fullname': 'Joseph', 'name': 'Joseph717171', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}]",/posts/singhsidhukuldeep/639926000427051,1294,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,243951824255912,"[{'type': 'mention', 'user': 'mlabonne', 'raw': '@mlabonne'}, {'type': 'text', 'value': ' hey there 🙋🏻\u200d♂️ I kinda got obsessed with your great model , and i found the endpoint for it in lambda labs, but basically i got rate limited / banned for trying to make my DPO dataset project, i was wondering if you all had an open ai compatible solution for me to make a great ""thinking"" sft + dpo dataset with all the splits 🙏🏻🙏🏻 kinda desparate , it\'s true , but was looking forward to a nice write ups 🚀🚀🚀', 'raw': ' hey there 🙋🏻\u200d♂️ I kinda got obsessed with your great model , and i found the endpoint for it in lambda labs, but basically i got rate limited / banned for trying to make my DPO dataset project, i was wondering if you all had an open ai compatible solution for me to make a great ""thinking"" sft + dpo dataset with all the splits 🙏🏻🙏🏻 kinda desparate , it\'s true , but was looking forward to a nice write ups 🚀🚀🚀'}]","@mlabonne hey there 🙋🏻‍♂️ I kinda got obsessed with your great model , and i found the endpoint for it in lambda labs, but basically i got rate limited / banned for trying to make my DPO dataset project, i was wondering if you all had an open ai compatible solution for me to make a great ""thinking"" sft + dpo dataset with all the splits 🙏🏻🙏🏻 kinda desparate , it's true , but was looking forward to a nice write ups 🚀🚀🚀",[],"[{'_id': '61b8e2ba285851687028d395', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/Rq3xWG7mJ3aCRoBsq340h.jpeg', 'fullname': 'Maxime Labonne', 'name': 'mlabonne', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 
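Circling back to the torchao post above, its headline API is a one-liner in practice. A minimal sketch, assuming torchao >= 0.5's quantize_ entry point; the toy model stands in for any torch.nn.Module:

```python
# Sketch of torchao's one-line quantization API (assuming torchao >= 0.5;
# the toy model stands in for any torch.nn.Module). Requires a CUDA device.
import torch
from torchao.quantization import quantize_, int8_weight_only

model = torch.nn.Sequential(
    torch.nn.Linear(1024, 4096),
    torch.nn.ReLU(),
    torch.nn.Linear(4096, 1024),
).to("cuda")

quantize_(model, int8_weight_only())               # swaps weights to int8 in place

model = torch.compile(model, mode="max-autotune")  # stacks with torch.compile
x = torch.randn(8, 1024, device="cuda")
with torch.no_grad():
    y = model(x)
```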
5406}]","[{'reaction': '❤️', 'users': ['mlabonne', 'ggamecrazy', 'munyadev', 'leeloolee'], 'count': 4}, {'reaction': '👀', 'users': ['John6666', 'louisbrulenaudet', 'ggamecrazy'], 'count': 3}]",2024-10-02 17:22:49,2024-10-02 19:18:46.432,"[{'_id': '61b8e2ba285851687028d395', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/Rq3xWG7mJ3aCRoBsq340h.jpeg', 'fullname': 'Maxime Labonne', 'name': 'mlabonne', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5406, 'isFollowing': False}]",/posts/Tonic/243951824255912,1805,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,956298330215271,"[{'type': 'text', 'value': 'Emu3: Next-token prediction conquers multimodal tasks 🔥', 'raw': 'Emu3: Next-token prediction conquers multimodal tasks 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is the most important research in months: we’re now very close to having a single architecture to handle all modalities. The folks at Beijing Academy of Artificial Intelligence (BAAI) just released Emu3, a single model that handles text, images, and videos all at once.', 'raw': 'This is the most important research in months: we’re now very close to having a single architecture to handle all modalities. The folks at Beijing Academy of Artificial Intelligence (BAAI) just released Emu3, a single model that handles text, images, and videos all at once.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""𝗪𝗵𝗮𝘁'𝘀 𝘁𝗵𝗲 𝗯𝗶𝗴 𝗱𝗲𝗮𝗹?"", 'raw': ""𝗪𝗵𝗮𝘁'𝘀 𝘁𝗵𝗲 𝗯𝗶𝗴 𝗱𝗲𝗮𝗹?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟 Emu3 is the first model to truly unify all these different types of data (text, images, video) using just one simple trick: predicting the next token.', 'raw': '🌟 Emu3 is the first model to truly unify all these different types of data (text, images, video) using just one simple trick: predicting the next token.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And it’s only 8B, but really strong:', 'raw': 'And it’s only 8B, but really strong:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🖼️ For image generation, it's matching the best specialized models out there, like SDXL."", 'raw': ""🖼️ For image generation, it's matching the best specialized models out there, like SDXL.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""👁️ In vision tasks, it's outperforming top models like LLaVA-1.6-7B, which is a big deal for a model that wasn't specifically designed for this."", 'raw': ""👁️ In vision tasks, it's outperforming top models like LLaVA-1.6-7B, which is a big deal for a model that wasn't specifically designed for this.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🎬 It's the first to nail video generation without using complicated diffusion techniques."", 'raw': ""🎬 It's the first to nail video generation without using complicated diffusion techniques.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗶𝘁 𝘄𝗼𝗿𝗸?', 'raw': '𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗶𝘁 𝘄𝗼𝗿𝗸?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧩 Emu3 uses a special tokenizer (SBER-MoVQGAN) to turn images and video clips into sequences of 4,096 tokens.', 'raw': '🧩 Emu3 uses a special tokenizer (SBER-MoVQGAN) to turn images and 
video clips into sequences of 4,096 tokens.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Then, it treats everything - text, images, and videos - as one long series of tokens to predict.', 'raw': '🔗 Then, it treats everything - text, images, and videos - as one long series of tokens to predict.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🔮 During training, it just tries to guess the next token, whether that's a word, part of an image, or a video frame."", 'raw': ""🔮 During training, it just tries to guess the next token, whether that's a word, part of an image, or a video frame.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗖𝗮𝘃𝗲𝗮𝘁𝘀 𝗼𝗻 𝘁𝗵𝗲 𝗿𝗲𝘀𝘂𝗹𝘁𝘀:', 'raw': '𝗖𝗮𝘃𝗲𝗮𝘁𝘀 𝗼𝗻 𝘁𝗵𝗲 𝗿𝗲𝘀𝘂𝗹𝘁𝘀:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 In image generation, Emu3 beats SDXL, but it’s also much bigger (8B vs 3.5B). It would be more difficult to beat the real diffusion GOAT FLUX-dev.', 'raw': '👉 In image generation, Emu3 beats SDXL, but it’s also much bigger (8B vs 3.5B). It would be more difficult to beat the real diffusion GOAT FLUX-dev.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 In vision, authors also don’t show a comparison against all the current SOTA models like Qwen-VL or Pixtral.', 'raw': '👉 In vision, authors also don’t show a comparison against all the current SOTA models like Qwen-VL or Pixtral.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This approach is exciting because it's simple (next token prediction) and scalable(handles all sorts of data)!"", 'raw': ""This approach is exciting because it's simple (next token prediction) and scalable(handles all sorts of data)!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the paper 👉 ', 'raw': 'Read the paper 👉 '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2409.18869'}, 'url': 'https://huggingface.co/papers/2409.18869', 'raw': 'https://huggingface.co/papers/2409.18869', 'label': 'Emu3: Next-Token Prediction is All You Need (2409.18869)'}]","Emu3: Next-token prediction conquers multimodal tasks 🔥 + +This is the most important research in months: we’re now very close to having a single architecture to handle all modalities. The folks at Beijing Academy of Artificial Intelligence (BAAI) just released Emu3, a single model that handles text, images, and videos all at once. + +𝗪𝗵𝗮𝘁'𝘀 𝘁𝗵𝗲 𝗯𝗶𝗴 𝗱𝗲𝗮𝗹? +🌟 Emu3 is the first model to truly unify all these different types of data (text, images, video) using just one simple trick: predicting the next token. +And it’s only 8B, but really strong: +🖼️ For image generation, it's matching the best specialized models out there, like SDXL. +👁️ In vision tasks, it's outperforming top models like LLaVA-1.6-7B, which is a big deal for a model that wasn't specifically designed for this. +🎬 It's the first to nail video generation without using complicated diffusion techniques. + +𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗶𝘁 𝘄𝗼𝗿𝗸? +🧩 Emu3 uses a special tokenizer (SBER-MoVQGAN) to turn images and video clips into sequences of 4,096 tokens. +🔗 Then, it treats everything - text, images, and videos - as one long series of tokens to predict. +🔮 During training, it just tries to guess the next token, whether that's a word, part of an image, or a video frame. 
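A toy sketch of what this unification means mechanically (illustrative only; not Emu3's code or real vocabulary sizes): visual tokens from a VQ codebook are offset into the text vocabulary, so a single autoregressive model predicts both.

```python
# Toy sketch of unified next-token prediction over text + visual tokens
# (illustrative; not Emu3's actual code or vocabulary sizes).
import torch

TEXT_VOCAB = 32_000
VQ_CODES = 4_096                       # e.g. one id per quantized image patch
VOCAB = TEXT_VOCAB + VQ_CODES          # shared vocabulary

text_ids = torch.randint(0, TEXT_VOCAB, (12,))
image_ids = torch.randint(0, VQ_CODES, (64,)) + TEXT_VOCAB  # offset into shared space

# One flat sequence; the model just predicts the next id, whatever its modality.
seq = torch.cat([text_ids, image_ids]).unsqueeze(0)         # (1, T)

embed = torch.nn.Embedding(VOCAB, 256)
block = torch.nn.TransformerEncoderLayer(d_model=256, nhead=4, batch_first=True)
head = torch.nn.Linear(256, VOCAB)

T = seq.shape[1]
causal = torch.nn.Transformer.generate_square_subsequent_mask(T)
logits = head(block(embed(seq), src_mask=causal))           # (1, T, VOCAB)
loss = torch.nn.functional.cross_entropy(
    logits[:, :-1].reshape(-1, VOCAB), seq[:, 1:].reshape(-1)
)
```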
+ +𝗖𝗮𝘃𝗲𝗮𝘁𝘀 𝗼𝗻 𝘁𝗵𝗲 𝗿𝗲𝘀𝘂𝗹𝘁𝘀: +👉 In image generation, Emu3 beats SDXL, but it’s also much bigger (8B vs 3.5B). It would be more difficult to beat the real diffusion GOAT FLUX-dev. +👉 In vision, authors also don’t show a comparison against all the current SOTA models like Qwen-VL or Pixtral. + +This approach is exciting because it's simple (next token prediction) and scalable(handles all sorts of data)! + +Read the paper 👉 https://huggingface.co/papers/2409.18869","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/vMHyHj9jLYvFNnlg-5_rD.png'}]",[],"[{'reaction': '🔥', 'users': ['umair894', 'acamilogg88', 'lamhieu', 'Donutanti'], 'count': 4}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-10-02 16:19:49,2024-10-02 16:19:49.037,[],/posts/m-ric/956298330215271,1175,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,144143912433557,"[{'type': 'text', 'value': '🚀🕺🌟 New Research Alert - ECCV 2024 (Avatars Collection)! 🌟💃🚀', 'raw': '🚀🕺🌟 New Research Alert - ECCV 2024 (Avatars Collection)! 🌟💃🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: Expressive Whole-Body 3D Gaussian Avatar 🔝', 'raw': '📄 Title: Expressive Whole-Body 3D Gaussian Avatar 🔝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: ExAvatar is a model that generates animatable 3D human avatars with facial expressions and hand movements from short monocular videos using a hybrid mesh and 3D Gaussian representation.', 'raw': '📝 Description: ExAvatar is a model that generates animatable 3D human avatars with facial expressions and hand movements from short monocular videos using a hybrid mesh and 3D Gaussian representation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Gyeongsik Moon, Takaaki Shiratori, and ', 'raw': '👥 Authors: Gyeongsik Moon, Takaaki Shiratori, and '}, {'type': 'mention', 'user': 'psyth', 'raw': '@psyth'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: ECCV, 29 Sep – 4 Oct, 2024 | Milano, Italy 🇮🇹', 'raw': '📅 Conference: ECCV, 29 Sep – 4 Oct, 2024 | Milano, Italy 🇮🇹'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2407.08414'}, 'url': 'https://huggingface.co/papers/2407.08414', 'raw': 'https://huggingface.co/papers/2407.08414', 'label': 'MeshAvatar: Learning High-quality Triangular Human Avatars from\n Multi-view Videos (2407.08414)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2407.21686'}, 'url': 'https://huggingface.co/papers/2407.21686', 'raw': 'https://huggingface.co/papers/2407.21686', 'label': 'Expressive Whole-Body 3D Gaussian Avatar (2407.21686)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Page: ', 'raw': '🌐 Github Page: '}, {'type': 'link', 'href': 'https://mks0601.github.io/ExAvatar', 'raw': 'https://mks0601.github.io/ExAvatar'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 
'value': '📁 Repository: ', 'raw': '📁 Repository: '}, {'type': 'link', 'href': 'https://github.com/mks0601/ExAvatar_RELEASE', 'raw': 'https://github.com/mks0601/ExAvatar_RELEASE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 CVPR-2023-24-Papers: ', 'raw': '🚀 CVPR-2023-24-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/CVPR-2023-24-Papers', 'raw': 'https://github.com/DmitryRyumin/CVPR-2023-24-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 WACV-2024-Papers: ', 'raw': '🚀 WACV-2024-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/WACV-2024-Papers', 'raw': 'https://github.com/DmitryRyumin/WACV-2024-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 ICCV-2023-Papers: ', 'raw': '🚀 ICCV-2023-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/ICCV-2023-Papers', 'raw': 'https://github.com/DmitryRyumin/ICCV-2023-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #ExAvatar #3DAvatar #FacialExpressions #HandMotions #MonocularVideo #3DModeling #GaussianSplatting #MachineLearning #ComputerVision #ComputerGraphics #DeepLearning #AI #ECCV2024', 'raw': '🔍 Keywords: #ExAvatar #3DAvatar #FacialExpressions #HandMotions #MonocularVideo #3DModeling #GaussianSplatting #MachineLearning #ComputerVision #ComputerGraphics #DeepLearning #AI #ECCV2024'}]","🚀🕺🌟 New Research Alert - ECCV 2024 (Avatars Collection)! 🌟💃🚀 +📄 Title: Expressive Whole-Body 3D Gaussian Avatar 🔝 + +📝 Description: ExAvatar is a model that generates animatable 3D human avatars with facial expressions and hand movements from short monocular videos using a hybrid mesh and 3D Gaussian representation. 
+ +👥 Authors: Gyeongsik Moon, Takaaki Shiratori, and @psyth + +📅 Conference: ECCV, 29 Sep – 4 Oct, 2024 | Milano, Italy 🇮🇹 + +📄 Paper: https://huggingface.co/papers/2407.08414 + +📄 Paper: https://huggingface.co/papers/2407.21686 + +🌐 Github Page: https://mks0601.github.io/ExAvatar +📁 Repository: https://github.com/mks0601/ExAvatar_RELEASE + +🚀 CVPR-2023-24-Papers: https://github.com/DmitryRyumin/CVPR-2023-24-Papers + +🚀 WACV-2024-Papers: https://github.com/DmitryRyumin/WACV-2024-Papers + +🚀 ICCV-2023-Papers: https://github.com/DmitryRyumin/ICCV-2023-Papers + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +🔍 Keywords: #ExAvatar #3DAvatar #FacialExpressions #HandMotions #MonocularVideo #3DModeling #GaussianSplatting #MachineLearning #ComputerVision #ComputerGraphics #DeepLearning #AI #ECCV2024","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/1JO_jBZZedVsZJzTG3Zg6.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/lJ9eMw_nP-K_mNde-uy4e.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/YtjofFsSAc0xbyFjHgIYr.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/cqdAHJIockuC6N18lHxiF.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/j8s2R1YAhxKlWQJHI4vxe.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/t4OvsKQdKZsIFXACEa9my.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/nO_9x8ItCEJTeRBFqMXaE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/2lNXvBFkAnVzlIoR5w_QR.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/ZU5en24HAmT837QXfB5x6.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}, {'_id': '630688edd37ce67e0e50e548', 'avatarUrl': '/avatars/1681e465d7649f67f94e1b69d236cb1e.svg', 'fullname': 'Shunsuke Saito', 'name': 'psyth', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}]","[{'reaction': '🔥', 'users': ['DmitryRyumin', 'John6666', 'edgar222', 'davidberenstein1957', 'xepile', 'art-bashkirev'], 'count': 6}, {'reaction': '🤗', 'users': ['DmitryRyumin', 'edgar222', 'davidberenstein1957', 'sunnyient', 'art-bashkirev'], 'count': 5}]",2024-09-26 18:23:14,2024-09-26 18:23:14.692,[],/posts/DmitryRyumin/144143912433557,2140,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,384487520549880,"[{'type': 'text', 'value': 'Good folks at Meta has just unveiled Llama 3.2, pushing the boundaries of language models and computer vision.', 'raw': 'Good folks 
at Meta has just unveiled Llama 3.2, pushing the boundaries of language models and computer vision.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Even more interesting is how they trained this cutting-edge model:', 'raw': 'Even more interesting is how they trained this cutting-edge model:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ Architecture:', 'raw': '1️⃣ Architecture:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Llama 3.2 uses an optimized transformer architecture with auto-regressive capabilities. The largest models (11B and 90B) now support multimodal inputs, integrating both text and images.', 'raw': 'Llama 3.2 uses an optimized transformer architecture with auto-regressive capabilities. The largest models (11B and 90B) now support multimodal inputs, integrating both text and images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Training Pipeline:', 'raw': '2️⃣ Training Pipeline:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Started with pretrained Llama 3.1 text models', 'raw': '• Started with pretrained Llama 3.1 text models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Added image adapters and encoders', 'raw': '• Added image adapters and encoders'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Pretrained on large-scale noisy (image, text) pair data', 'raw': '• Pretrained on large-scale noisy (image, text) pair data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Fine-tuned on high-quality in-domain and knowledge-enhanced (image, text) pairs', 'raw': '• Fine-tuned on high-quality in-domain and knowledge-enhanced (image, text) pairs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Vision Integration:', 'raw': '3️⃣ Vision Integration:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Trained adapter weights to integrate a pre-trained image encoder', 'raw': '• Trained adapter weights to integrate a pre-trained image encoder'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Used cross-attention layers to feed image representations into the language model', 'raw': '• Used cross-attention layers to feed image representations into the language model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Preserved text-only capabilities by not updating language model parameters during adapter training', 'raw': '• Preserved text-only capabilities by not updating language model parameters during adapter training'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4️⃣ Post-Training Alignment:', 'raw': '4️⃣ Post-Training Alignment:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Multiple rounds of supervised fine-tuning (SFT)', 'raw': '• Multiple rounds of supervised fine-tuning (SFT)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Rejection sampling (RS)', 'raw': '• Rejection sampling (RS)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Direct preference optimization (DPO)', 'raw': '• Direct preference optimization (DPO)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Synthetic data generation using Llama 3.1 for Q&A augmentation', 'raw': '• Synthetic data generation using Llama 3.1 for Q&A 
augmentation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Reward model ranking for high-quality fine-tuning data', 'raw': '• Reward model ranking for high-quality fine-tuning data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5️⃣ Lightweight Models:', 'raw': '5️⃣ Lightweight Models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Used pruning and distillation techniques for 1B and 3B models', 'raw': '• Used pruning and distillation techniques for 1B and 3B models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Structured pruning from Llama 3.1 8B model', 'raw': '• Structured pruning from Llama 3.1 8B model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Knowledge distillation using Llama 3.1 8B and 70B as teachers', 'raw': '• Knowledge distillation using Llama 3.1 8B and 70B as teachers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6️⃣ Context Length:', 'raw': '6️⃣ Context Length:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All models support an impressive 128K token context length.', 'raw': 'All models support an impressive 128K token context length.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7️⃣ Safety Measures:', 'raw': '7️⃣ Safety Measures:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Incorporated safety mitigation data to balance helpfulness and safety.', 'raw': 'Incorporated safety mitigation data to balance helpfulness and safety.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The result? A suite of models ranging from edge-friendly 1B parameters to powerful 90B parameter versions, capable of sophisticated reasoning across text and images. Llama 3.2 is set to revolutionize AI applications from mobile devices to enterprise-scale solutions.', 'raw': 'The result? A suite of models ranging from edge-friendly 1B parameters to powerful 90B parameter versions, capable of sophisticated reasoning across text and images. Llama 3.2 is set to revolutionize AI applications from mobile devices to enterprise-scale solutions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""What are your thoughts on these advancements? How do you see Llama 3.2 impacting your industry? Let's discuss in the comments!"", 'raw': ""What are your thoughts on these advancements? How do you see Llama 3.2 impacting your industry? Let's discuss in the comments!""}, {'type': 'new_line', 'raw': '\n'}]","Good folks at Meta have just unveiled Llama 3.2, pushing the boundaries of language models and computer vision. + +Even more interesting is how they trained this cutting-edge model: + +1️⃣ Architecture: +Llama 3.2 uses an optimized transformer architecture with auto-regressive capabilities. The largest models (11B and 90B) now support multimodal inputs, integrating both text and images. 
+ +2️⃣ Training Pipeline: +• Started with pretrained Llama 3.1 text models +• Added image adapters and encoders +• Pretrained on large-scale noisy (image, text) pair data +• Fine-tuned on high-quality in-domain and knowledge-enhanced (image, text) pairs + +3️⃣ Vision Integration: +• Trained adapter weights to integrate a pre-trained image encoder +• Used cross-attention layers to feed image representations into the language model +• Preserved text-only capabilities by not updating language model parameters during adapter training + +4️⃣ Post-Training Alignment: +• Multiple rounds of supervised fine-tuning (SFT) +• Rejection sampling (RS) +• Direct preference optimization (DPO) +• Synthetic data generation using Llama 3.1 for Q&A augmentation +• Reward model ranking for high-quality fine-tuning data + +5️⃣ Lightweight Models: +• Used pruning and distillation techniques for 1B and 3B models +• Structured pruning from Llama 3.1 8B model +• Knowledge distillation using Llama 3.1 8B and 70B as teachers + +6️⃣ Context Length: +All models support an impressive 128K token context length. + +7️⃣ Safety Measures: +Incorporated safety mitigation data to balance helpfulness and safety. + +The result? A suite of models ranging from edge-friendly 1B parameters to powerful 90B parameter versions, capable of sophisticated reasoning across text and images. Llama 3.2 is set to revolutionize AI applications from mobile devices to enterprise-scale solutions. + +What are your thoughts on these advancements? How do you see Llama 3.2 impacting your industry? Let's discuss in the comments! +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/mlNuy6PU09plPJxUt6ugg.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['fffiloni', 'KingNish', 'fsommers', 'skalaliya', 'John6666', 'louisbrulenaudet', 'adamelliotfields', 'emidiosouza', 'AtAndDev'], 'count': 9}, {'reaction': '👍', 'users': ['noobmldude', 'skalaliya', 'ShaneTian', 'Hecdin', 'AtAndDev'], 'count': 5}]",2024-09-26 16:31:56,2024-09-26 16:31:56.482,[],/posts/singhsidhukuldeep/384487520549880,2631,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/66b05ca6e7c57eac7cafbbc4/nddUkS3xu78cxCS-r7-xB.jpeg,36.0,Ann Huang,erinys,719995159142282,"[{'type': 'text', 'value': ' We did a thing! Eight weeks into our Hugging Face tenure, we can demo a round-trip of Xet-backed files from our local machine to a prod Hugging Face S3 bucket and back. 🚀', 'raw': ' We did a thing! Eight weeks into our Hugging Face tenure, we can demo a round-trip of Xet-backed files from our local machine to a prod Hugging Face S3 bucket and back. 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It’s been exciting to dive into how the Hub is built and design our steel thread through the infrastructure. Now that the thread is up, we can kick off project Capacious Extremis 🪄 to add all the other goodies: authentication, authorization, deduplication, privacy, and more. ', 'raw': 'It’s been exciting to dive into how the Hub is built and design our steel thread through the infrastructure. Now that the thread is up, we can kick off project Capacious Extremis 🪄 to add all the other goodies: authentication, authorization, deduplication, privacy, and more. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""What does this mean for you? 
You’re one step closer to ⚡ faster downloads, uploads, and iterative development on Hugging Face Hub!\u2028This is our first step toward replacing Git LFS as the Hub's storage backend: "", 'raw': ""What does this mean for you? You’re one step closer to ⚡ faster downloads, uploads, and iterative development on Hugging Face Hub!\u2028This is our first step toward replacing Git LFS as the Hub's storage backend: ""}, {'type': 'link', 'href': 'https://huggingface.co/blog/xethub-joins-hf', 'raw': 'https://huggingface.co/blog/xethub-joins-hf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the demo on LinkedIn to see the transfer in action: ', 'raw': 'Check out the demo on LinkedIn to see the transfer in action: '}, {'type': 'link', 'href': 'https://www.linkedin.com/posts/annux_youve-heard-of-blue-steel-but-have-activity-7245062126535405568-3cvJ', 'raw': 'https://www.linkedin.com/posts/annux_youve-heard-of-blue-steel-but-have-activity-7245062126535405568-3cvJ'}]"," We did a thing! Eight weeks into our Hugging Face tenure, we can demo a round-trip of Xet-backed files from our local machine to a prod Hugging Face S3 bucket and back. 🚀 + +It’s been exciting to dive into how the Hub is built and design our steel thread through the infrastructure. Now that the thread is up, we can kick off project Capacious Extremis 🪄 to add all the other goodies: authentication, authorization, deduplication, privacy, and more. + +What does this mean for you? You’re one step closer to ⚡ faster downloads, uploads, and iterative development on Hugging Face Hub!
This is our first step toward replacing Git LFS as the Hub's storage backend: https://huggingface.co/blog/xethub-joins-hf + +Check out the demo on LinkedIn to see the transfer in action: https://www.linkedin.com/posts/annux_youve-heard-of-blue-steel-but-have-activity-7245062126535405568-3cvJ",[],[],"[{'reaction': '🔥', 'users': ['jsulz', 'John6666', 'davidberenstein1957', 'gabrielmbmb', 'clem'], 'count': 5}]",2024-09-26 16:07:27,2024-09-26 16:10:48.832,[],/posts/erinys/719995159142282,1386,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/620630b603825909dcbeba35/vNlOtJqdcP3vpAfeHfNvP.jpeg,246.0,Aaron C Wacker,awacke1,988164848850810,"[{'type': 'text', 'value': 'My earliest / most popular AI example of using ', 'raw': 'My earliest / most popular AI example of using '}, {'type': 'mention', 'user': 'pytorch', 'raw': '@pytorch'}, {'type': 'text', 'value': ' for art has been updated with AI Pair Programming (AIPP) to use the resolution of the original inputs with ', 'raw': ' for art has been updated with AI Pair Programming (AIPP) to use the resolution of the original inputs with '}, {'type': 'mention', 'user': 'gradio', 'raw': '@gradio'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'AnthropicAI', 'raw': '@AnthropicAI'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For anyone learning AI model development - this is a great starter!', 'raw': 'For anyone learning AI model development - this is a great starter!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/Image-to-Line-Drawings'}, 'url': 'https://huggingface.co/spaces/awacke1/Image-to-Line-Drawings', 'raw': 'https://huggingface.co/spaces/awacke1/Image-to-Line-Drawings'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","My earliest / most popular AI example of using @pytorch for art has been updated with AI Pair Programming (AIPP) to use the resolution of the original inputs with @gradio and @AnthropicAI + +For anyone learning AI model development - this is a great starter! 
+ +https://huggingface.co/spaces/awacke1/Image-to-Line-Drawings + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/1L0FQKTmGXtyH2KwvkBjR.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-26 15:40:51,2024-09-26 15:40:51.057,[],/posts/awacke1/988164848850810,760,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/626505d493e0b04d75710566/9rfJc9ORXU9J5a42Ev3v6.png,118.0,Stefano Fiorucci,anakin87,772003078630193,"[{'type': 'text', 'value': '🕵🏻 𝐀𝐠𝐞𝐧𝐭𝐢𝐜 𝐑𝐀𝐆 𝐰𝐢𝐭𝐡 🦙 𝐋𝐥𝐚𝐦𝐚 3.2', 'raw': '🕵🏻 𝐀𝐠𝐞𝐧𝐭𝐢𝐜 𝐑𝐀𝐆 𝐰𝐢𝐭𝐡 🦙 𝐋𝐥𝐚𝐦𝐚 3.2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I was excited to explore Llama 3.2, but as a simple 🇪🇺 EU guy, I don't have access to Meta's multimodal models 😿"", 'raw': ""I was excited to explore Llama 3.2, but as a simple 🇪🇺 EU guy, I don't have access to Meta's multimodal models 😿""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤔 So I thought: why not challenge the small 3B text model with Agentic RAG?', 'raw': '🤔 So I thought: why not challenge the small 3B text model with Agentic RAG?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 The plan:', 'raw': '🎯 The plan:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Build a system that tries to answer questions using a knowledge base.', 'raw': '- Build a system that tries to answer questions using a knowledge base.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- If the documents don't contain the answer, use Web search for additional context."", 'raw': ""- If the documents don't contain the answer, use Web search for additional context.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out my experimental notebook here: 📓 ', 'raw': 'Check out my experimental notebook here: 📓 '}, {'type': 'link', 'href': 'https://colab.research.google.com/github/deepset-ai/haystack-cookbook/blob/main/notebooks/llama32_agentic_rag.ipynb', 'raw': 'https://colab.research.google.com/github/deepset-ai/haystack-cookbook/blob/main/notebooks/llama32_agentic_rag.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My stack:', 'raw': 'My stack:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏗️ haystack (', 'raw': '🏗️ haystack ('}, {'type': 'link', 'href': 'https://haystack.deepset.ai/', 'raw': 'https://haystack.deepset.ai/'}, {'type': 'text', 'value': '): open-source LLM orchestration framework', 'raw': '): open-source LLM orchestration framework'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦙 ', 'raw': '🦙 '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'meta-llama/Llama-3.2-3B-Instruct'}, 'url': 'https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct', 'raw': 'https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦆🌐 free DuckDuckGo API, integrated with Haystack', 'raw': '🦆🌐 free DuckDuckGo API, integrated with Haystack'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""✨ 𝘛𝘩𝘦 𝘳𝘦𝘴𝘶𝘭𝘵𝘴? 
𝘌𝘯𝘤𝘰𝘶𝘳𝘢𝘨𝘪𝘯𝘨 - 𝘢 𝘧𝘦𝘸 𝘮𝘰𝘯𝘵𝘩𝘴 𝘢𝘨𝘰, 𝘵𝘩𝘪𝘴 𝘭𝘦𝘷𝘦𝘭 𝘰𝘧 𝘱𝘦𝘳𝘧𝘰𝘳𝘮𝘢𝘯𝘤𝘦 𝘧𝘳𝘰𝘮 𝘢 𝘴𝘮𝘢𝘭𝘭 𝘮𝘰𝘥𝘦𝘭 𝘸𝘰𝘶𝘭𝘥'𝘷𝘦 𝘣𝘦𝘦𝘯 𝘶𝘯𝘵𝘩𝘪𝘯𝘬𝘢𝘣𝘭𝘦!"", 'raw': ""✨ 𝘛𝘩𝘦 𝘳𝘦𝘴𝘶𝘭𝘵𝘴? 𝘌𝘯𝘤𝘰𝘶𝘳𝘢𝘨𝘪𝘯𝘨 - 𝘢 𝘧𝘦𝘸 𝘮𝘰𝘯𝘵𝘩𝘴 𝘢𝘨𝘰, 𝘵𝘩𝘪𝘴 𝘭𝘦𝘷𝘦𝘭 𝘰𝘧 𝘱𝘦𝘳𝘧𝘰𝘳𝘮𝘢𝘯𝘤𝘦 𝘧𝘳𝘰𝘮 𝘢 𝘴𝘮𝘢𝘭𝘭 𝘮𝘰𝘥𝘦𝘭 𝘸𝘰𝘶𝘭𝘥'𝘷𝘦 𝘣𝘦𝘦𝘯 𝘶𝘯𝘵𝘩𝘪𝘯𝘬𝘢𝘣𝘭𝘦!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This probably reflects the impressive IFEval score of the model (comparable to Llama 3.1 8B).', 'raw': 'This probably reflects the impressive IFEval score of the model (comparable to Llama 3.1 8B).'}]","🕵🏻 𝐀𝐠𝐞𝐧𝐭𝐢𝐜 𝐑𝐀𝐆 𝐰𝐢𝐭𝐡 🦙 𝐋𝐥𝐚𝐦𝐚 3.2 + +I was excited to explore Llama 3.2, but as a simple 🇪🇺 EU guy, I don't have access to Meta's multimodal models 😿 + +🤔 So I thought: why not challenge the small 3B text model with Agentic RAG? + +🎯 The plan: +- Build a system that tries to answer questions using a knowledge base. +- If the documents don't contain the answer, use Web search for additional context. + + +Check out my experimental notebook here: 📓 https://colab.research.google.com/github/deepset-ai/haystack-cookbook/blob/main/notebooks/llama32_agentic_rag.ipynb + + +My stack: +🏗️ haystack (https://haystack.deepset.ai/): open-source LLM orchestration framework +🦙 https://huggingface.co/meta-llama/Llama-3.2-3B-Instruct +🦆🌐 free DuckDuckGo API, integrated with Haystack + +✨ 𝘛𝘩𝘦 𝘳𝘦𝘴𝘶𝘭𝘵𝘴? 𝘌𝘯𝘤𝘰𝘶𝘳𝘢𝘨𝘪𝘯𝘨 - 𝘢 𝘧𝘦𝘸 𝘮𝘰𝘯𝘵𝘩𝘴 𝘢𝘨𝘰, 𝘵𝘩𝘪𝘴 𝘭𝘦𝘷𝘦𝘭 𝘰𝘧 𝘱𝘦𝘳𝘧𝘰𝘳𝘮𝘢𝘯𝘤𝘦 𝘧𝘳𝘰𝘮 𝘢 𝘴𝘮𝘢𝘭𝘭 𝘮𝘰𝘥𝘦𝘭 𝘸𝘰𝘶𝘭𝘥'𝘷𝘦 𝘣𝘦𝘦𝘯 𝘶𝘯𝘵𝘩𝘪𝘯𝘬𝘢𝘣𝘭𝘦! +This probably reflects the impressive IFEval score of the model (comparable to Llama 3.1 8B).",[],[],"[{'reaction': '👍', 'users': ['asoria', 'emanuelaboros', 'John6666', 'PKPL', 'Marvin73', 'ZNDHAESE'], 'count': 6}]",2024-09-26 14:05:14,2024-09-27 07:56:03.257,[],/posts/anakin87/772003078630193,1767,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,560314856478741,"[{'type': 'text', 'value': 'Transformers v4.45.0 released: includes a lightning-fast method to build tools! ⚡️', 'raw': 'Transformers v4.45.0 released: includes a lightning-fast method to build tools! 
⚡️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'During user research with colleagues ', 'raw': 'During user research with colleagues '}, {'type': 'mention', 'user': 'MoritzLaurer', 'raw': '@MoritzLaurer'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'Jofthomas', 'raw': '@Jofthomas'}, {'type': 'text', 'value': ' , we discovered that the class definition currently in use to define a Tool in ', 'raw': ' , we discovered that the class definition currently in use to define a Tool in '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'inline_code', 'code': 'transformers.agents', 'raw': '`transformers.agents`'}, {'type': 'text', 'value': ' is a bit tedious to use, because it goes into great detail.', 'raw': ' is a bit tedious to use, because it goes into great detail.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ So I’ve made an easier way to build tools: just make a function with type hints + a docstring, and add a ', 'raw': '➡️ So I’ve made an easier way to build tools: just make a function with type hints + a docstring, and add a '}, {'type': 'mention', 'user': 'tool', 'raw': '@tool'}, {'type': 'text', 'value': ' decorator in front.', 'raw': ' decorator in front.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅\xa0Voilà, you’re good to go!', 'raw': '✅\xa0Voilà, you’re good to go!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read all about it in the new doc here: ', 'raw': 'Read all about it in the new doc here: '}, {'type': 'link', 'href': 'https://huggingface.co/docs/transformers/main/en/agents#create-a-new-tool', 'raw': 'https://huggingface.co/docs/transformers/main/en/agents#create-a-new-tool'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And don’t hesitate to give feedback, I’m all ears! 🤗', 'raw': 'And don’t hesitate to give feedback, I’m all ears! 🤗'}]","Transformers v4.45.0 released: includes a lightning-fast method to build tools! ⚡️ + +During user research with colleagues @MoritzLaurer and @Jofthomas , we discovered that the class definition currently in use to define a Tool in +`transformers.agents` is a bit tedious to use, because it goes into great detail. + +➡️ So I’ve made an easier way to build tools: just make a function with type hints + a docstring, and add a @tool decorator in front. + +✅ Voilà, you’re good to go! + +Read all about it in the new doc here: https://huggingface.co/docs/transformers/main/en/agents#create-a-new-tool + +And don’t hesitate to give feedback, I’m all ears! 
🤗","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/eGY-NKKcu77WNCQ7haIje.png'}]","[{'_id': '64257c616d0f0f5f1dc6aa2a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64257c616d0f0f5f1dc6aa2a/WNXC2PcyDn-jt9ZY5Rbka.jpeg', 'fullname': 'Joffrey THOMAS', 'name': 'Jofthomas', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3289}, {'_id': '5fb15d1e84389b139cf3b508', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1613511937628-5fb15d1e84389b139cf3b508.jpeg', 'fullname': 'Moritz Laurer', 'name': 'MoritzLaurer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 346}, {'_id': '63148ec88be84e1ab40e6ee5', 'avatarUrl': '/avatars/3c4b303dcc49850fa50e782f8f2624b5.svg', 'fullname': 'Parabola', 'name': 'Tool', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '🔥', 'users': ['Jofthomas', 'John6666', 'alielfilali01', 'AtAndDev', 'davidrd123', 'MoritzLaurer'], 'count': 6}, {'reaction': '❤️', 'users': ['louisbrulenaudet'], 'count': 1}]",2024-09-26 10:17:17,2024-09-26 10:17:17.210,[],/posts/m-ric/560314856478741,1546,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,371969196901274,"[{'type': 'text', 'value': 'Want to get familiar with llama 3.2 and an actual example of structured data generation? Try out the Google Colab I created.', 'raw': 'Want to get familiar with llama 3.2 and an actual example of structured data generation? Try out the Google Colab I created.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Notebook: ', 'raw': 'Notebook: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1nHNXUbgwRMyjFeBZbQvNLqaXGkqE4Wcs?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1nHNXUbgwRMyjFeBZbQvNLqaXGkqE4Wcs?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'hugging-quants/Llama-3.2-3B-Instruct-Q8_0-GGUF'}, 'url': 'https://huggingface.co/hugging-quants/Llama-3.2-3B-Instruct-Q8_0-GGUF/tree/main', 'raw': 'https://huggingface.co/hugging-quants/Llama-3.2-3B-Instruct-Q8_0-GGUF/tree/main'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'argilla/FinePersonas-v0.1'}, 'url': 'https://huggingface.co/datasets/argilla/FinePersonas-v0.1', 'raw': 'https://huggingface.co/datasets/argilla/FinePersonas-v0.1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Library: ', 'raw': 'Library: '}, {'type': 'link', 'href': 'https://github.com/argilla-io/distilabel', 'raw': 'https://github.com/argilla-io/distilabel'}]","Want to get familiar with llama 3.2 and an actual example of structured data generation? Try out the Google Colab I created. 
+ +Notebook: https://colab.research.google.com/drive/1nHNXUbgwRMyjFeBZbQvNLqaXGkqE4Wcs?usp=sharing +Model: https://huggingface.co/hugging-quants/Llama-3.2-3B-Instruct-Q8_0-GGUF/tree/main +Dataset: https://huggingface.co/datasets/argilla/FinePersonas-v0.1 +Library: https://github.com/argilla-io/distilabel",[],[],"[{'reaction': '👍', 'users': ['Kasnol', 'John6666', 'adamelliotfields', 'LeroyDyer', 'vinhnx90'], 'count': 5}, {'reaction': '🔥', 'users': ['asoria', 'Kaligraphy247'], 'count': 2}, {'reaction': '😎', 'users': ['NickyNicky'], 'count': 1}]",2024-09-26 08:00:31,2024-09-26 08:00:31.324,[],/posts/davidberenstein1957/371969196901274,1452,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a813dedbb9e28866a91b27/2fknEF_u6StSjp3uUF144.png,41.0,appvoid,appvoid,363699335352855,"[{'type': 'text', 'value': 'meta just released a 1b parameter model and to honor it i released arco 2 just in time for the fine-tuners to tweak around, enjoy these small powerful language models!!!', 'raw': 'meta just released a 1b parameter model and to honor it i released arco 2 just in time for the fine-tuners to tweak around, enjoy these small powerful language models!!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'meta-llama/Llama-3.2-1B'}, 'url': 'https://huggingface.co/meta-llama/Llama-3.2-1B', 'raw': 'https://huggingface.co/meta-llama/Llama-3.2-1B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'appvoid/arco-2'}, 'url': 'https://huggingface.co/appvoid/arco-2', 'raw': 'https://huggingface.co/appvoid/arco-2'}]","meta just released a 1b parameter model and to honor it i released arco 2 just in time for the fine-tuners to tweak around, enjoy these small powerful language models!!! + +https://huggingface.co/meta-llama/Llama-3.2-1B +https://huggingface.co/appvoid/arco-2","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62a813dedbb9e28866a91b27/KPTzttQs3icfmTdspn1sx.png'}]",[],"[{'reaction': '👍', 'users': ['iammaggie', 'John6666', 'bfuzzy1', 'darkzbaron', 'Izac'], 'count': 5}, {'reaction': '🔥', 'users': ['aiisthebest'], 'count': 1}]",2024-09-25 21:17:25,2024-09-26 04:55:58.295,"[{'_id': '64cb275b38837b12d50c32bc', 'avatarUrl': '/avatars/89995841d20aecc23e95a9cf088e33c0.svg', 'fullname': 'CHRIS MCGUIRE', 'name': 'iammaggie', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/appvoid/363699335352855,1827,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,973599697871972,"[{'type': 'text', 'value': 'A big day for multimodal models! ', 'raw': 'A big day for multimodal models! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Llama 3.2 is out with a major update: it can now process images.', 'raw': 'Llama 3.2 is out with a major update: it can now process images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key highlights:', 'raw': 'Key highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 11B and 90B vision models', 'raw': '• 11B and 90B vision models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Small 1B and 3B text models for mobile devices', 'raw': '• Small 1B and 3B text models for mobile devices'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Eval results already on the leaderboard: ', 'raw': 'Eval results already on the leaderboard: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'open-llm-leaderboard/open_llm_leaderboard'}, 'url': 'https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard', 'raw': 'https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection: ', 'raw': 'Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'meta-llama/llama-32-66f448ffc8c32f949b04c8cf'}, 'url': 'https://huggingface.co/collections/meta-llama/llama-32-66f448ffc8c32f949b04c8cf', 'raw': 'https://huggingface.co/collections/meta-llama/llama-32-66f448ffc8c32f949b04c8cf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","A big day for multimodal models! +Llama 3.2 is out with a major update: it can now process images. + +Key highlights: +• 11B and 90B vision models +• Small 1B and 3B text models for mobile devices + +Eval results already on the leaderboard: https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard +Collection: https://huggingface.co/collections/meta-llama/llama-32-66f448ffc8c32f949b04c8cf + + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/RUeWvjsnlfbeCnMNjG2d8.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['sachithgunasekara', 'YaTharThShaRma999', 'Joseph717171', 'John6666', 'DmitryRyumin', 'ajibawa-2023', 'bfuzzy1', 'Stanlv', 'aiisthebest', 'louisbrulenaudet', 'Salvor', 'davanstrien', 'Mackin7'], 'count': 13}, {'reaction': '🚀', 'users': ['Joseph717171', 'den0620', 'bfuzzy1', 'Stanlv', 'davanstrien'], 'count': 5}, {'reaction': '🤗', 'users': ['Joseph717171', 'DmitryRyumin'], 'count': 2}]",2024-09-25 18:40:52,2024-09-26 11:55:49.018,"[{'_id': '66ab46e650bd6711f3b55d60', 'avatarUrl': '/avatars/16f5824b31a1de65ea12272ad990d932.svg', 'fullname': 'Ai Programer', 'name': 'aiisthebest', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/fdaudens/973599697871972,3334,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,563411677031749,"[{'type': 'text', 'value': 'Exciting news in AI: Molmo, a groundbreaking family of open-source multimodal models, has just been announced! 🚀', 'raw': 'Exciting news in AI: Molmo, a groundbreaking family of open-source multimodal models, has just been announced! 
🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key points:', 'raw': 'Key points:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Closes the gap with proprietary systems on benchmarks & human evals', 'raw': '- Closes the gap with proprietary systems on benchmarks & human evals'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Trained on high-quality data (< 1M image-text pairs vs billions)', 'raw': '- Trained on high-quality data (< 1M image-text pairs vs billions)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Introduces pointing capability for rich interactions', 'raw': '- Introduces pointing capability for rich interactions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Fully open weights, data, and training code', 'raw': '- Fully open weights, data, and training code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The 72B model outperforms several proprietary systems, while the 1B model nearly matches GPT-4V. Small is indeed the new big in AI!', 'raw': 'The 72B model outperforms several proprietary systems, while the 1B model nearly matches GPT-4V. Small is indeed the new big in AI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""There's an interactive demo available using Molmo-7B-D. Definitely worth checking out to see its capabilities firsthand."", 'raw': ""There's an interactive demo available using Molmo-7B-D. Definitely worth checking out to see its capabilities firsthand.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All model weights, data, and code will be released soon. This is a significant step towards truly open, cutting-edge multimodal AI.', 'raw': 'All model weights, data, and code will be released soon. This is a significant step towards truly open, cutting-edge multimodal AI.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The future of AI research and applications is looking brighter than ever! 🤖🖼️', 'raw': 'The future of AI research and applications is looking brighter than ever! 🤖🖼️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Demo: ', 'raw': '👉 Demo: '}, {'type': 'link', 'href': 'https://molmo.allenai.org/', 'raw': 'https://molmo.allenai.org/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Models: ', 'raw': '👉 Models: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'allenai/molmo-66f379e6fe3b8ef090a8ca19'}, 'url': 'https://huggingface.co/collections/allenai/molmo-66f379e6fe3b8ef090a8ca19', 'raw': 'https://huggingface.co/collections/allenai/molmo-66f379e6fe3b8ef090a8ca19'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #MachineLearning #OpenSource #ComputerVision', 'raw': '#AI #MachineLearning #OpenSource #ComputerVision'}]","Exciting news in AI: Molmo, a groundbreaking family of open-source multimodal models, has just been announced! 
🚀 + +Key points: +- Closes the gap with proprietary systems on benchmarks & human evals +- Trained on high-quality data (< 1M image-text pairs vs billions) +- Introduces pointing capability for rich interactions +- Fully open weights, data, and training code + +The 72B model outperforms several proprietary systems, while the 1B model nearly matches GPT-4V. Small is indeed the new big in AI! + +There's an interactive demo available using Molmo-7B-D. Definitely worth checking out to see its capabilities firsthand. + +All model weights, data, and code will be released soon. This is a significant step towards truly open, cutting-edge multimodal AI. +The future of AI research and applications is looking brighter than ever! 🤖🖼️ + +👉 Demo: https://molmo.allenai.org/ +👉 Models: https://huggingface.co/collections/allenai/molmo-66f379e6fe3b8ef090a8ca19 + +#AI #MachineLearning #OpenSource #ComputerVision","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/4kVD0qU-M2jh--bSn5r3P.png'}]",[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'John6666', 'onekq', 'bfuzzy1', 'Yeecy', 'victor', 'davanstrien', 'alejandrogallardo'], 'count': 8}]",2024-09-25 16:59:04,2024-09-25 16:59:04.511,[],/posts/fdaudens/563411677031749,1822,,0 +/avatars/146a8ac3fb476e79b6503373df0f38c1.svg,49.0,Kaizhao Liang,kz919,945393662257901,"[{'type': 'text', 'value': 'Just for the meme.', 'raw': 'Just for the meme.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But the clear lesson I learnt from building these demos is: the more powerful the underlying base model is, the closer you will get to GPT4o1. CoT is nothing more than simply inducing the latent reasoning capability from the model.', 'raw': 'But the clear lesson I learnt from building these demos is: the more powerful the underlying base model is, the closer you will get to GPT4o1. CoT is nothing more than simply inducing the latent reasoning capability from the model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'kz919/GPT4-O1-Proximas'}, 'url': 'https://huggingface.co/spaces/kz919/GPT4-O1-Proximas', 'raw': 'https://huggingface.co/spaces/kz919/GPT4-O1-Proximas'}, {'type': 'new_line', 'raw': '\n'}]","Just for the meme. + +But the clear lesson I learnt from building these demos is: the more powerful the underlying base model is, the closer you will get to GPT4o1. CoT is nothing more than simply inducing the latent reasoning capability from the model. 
+ +https://huggingface.co/spaces/kz919/GPT4-O1-Proximas +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62140dcdcf7928035e8135ad/dLYbZiX5j5Ma-FpCXYE00.png'}]",[],"[{'reaction': '🚀', 'users': ['zolicsaki', 'John6666', 'kz919', 'KillerShoaib', 'juno12', 'osanseviero'], 'count': 6}, {'reaction': '🔥', 'users': ['roger-temp', 'kz919'], 'count': 2}, {'reaction': '😎', 'users': ['kz919'], 'count': 1}]",2024-09-20 14:53:13,2024-09-20 14:53:13.560,[],/posts/kz919/945393662257901,1588,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6435d564a4bd75c62cc03701/7P2G_wVNB6MISp2Phh427.jpeg,66.0,Agustín Piqueres Lajarín,plaguss,115598151470774,"[{'type': 'text', 'value': ""Take a look at distilabel's latest blog post to see how to leverage FinePersonas to create AI users for a social network, inspired by SocialAI's app: "", 'raw': ""Take a look at distilabel's latest blog post to see how to leverage FinePersonas to create AI users for a social network, inspired by SocialAI's app: ""}, {'type': 'link', 'href': 'https://x.com/michaelsayman/status/1835841675584811239', 'raw': 'https://x.com/michaelsayman/status/1835841675584811239'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Link: ', 'raw': '- Link: '}, {'type': 'link', 'href': 'https://distilabel.argilla.io/dev/sections/pipeline_samples/examples/fine_personas_social_network/', 'raw': 'https://distilabel.argilla.io/dev/sections/pipeline_samples/examples/fine_personas_social_network/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Sample dataset: ', 'raw': '- Sample dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'plaguss/FinePersonas-SocialAI-test'}, 'url': 'https://huggingface.co/datasets/plaguss/FinePersonas-SocialAI-test', 'raw': 'https://huggingface.co/datasets/plaguss/FinePersonas-SocialAI-test'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- FinePersonas: ', 'raw': '- FinePersonas: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'argilla/FinePersonas-v0.1'}, 'url': 'https://huggingface.co/datasets/argilla/FinePersonas-v0.1', 'raw': 'https://huggingface.co/datasets/argilla/FinePersonas-v0.1'}]","Take a look at distilabel's latest blog post to see how to leverage FinePersonas to create AI users for a social network, inspired by SocialAI's app: https://x.com/michaelsayman/status/1835841675584811239 + +- Link: https://distilabel.argilla.io/dev/sections/pipeline_samples/examples/fine_personas_social_network/ +- Sample dataset: https://huggingface.co/datasets/plaguss/FinePersonas-SocialAI-test +- FinePersonas: https://huggingface.co/datasets/argilla/FinePersonas-v0.1",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-20 14:29:05,2024-09-20 14:29:05.513,[],/posts/plaguss/115598151470774,487,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,517936806772862,"[{'type': 'text', 'value': 'Wikimedia Enterprise just dropped full English & French Wikipedia on Hugging Face as structured JSON 🤯', 'raw': 'Wikimedia Enterprise just dropped full English & French Wikipedia on Hugging Face as structured JSON 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key points:', 'raw': 'Key points:'}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'text', 'value': '1. Parsed articles ready for machine learning pipelines', 'raw': '1. Parsed articles ready for machine learning pipelines'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Perfect for AI model development - from pre-training to RAG', 'raw': '2. Perfect for AI model development - from pre-training to RAG'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Includes metadata, Wikidata links, and content scores', 'raw': '3. Includes metadata, Wikidata links, and content scores'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Licensed under GFDL and CC BY-SA 4.0 (some content may have additional terms)', 'raw': '4. Licensed under GFDL and CC BY-SA 4.0 (some content may have additional terms)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've been testing it, and it's a game-changer. The structured format is like a supercharged version of raw Wiki dumps."", 'raw': ""I've been testing it, and it's a game-changer. The structured format is like a supercharged version of raw Wiki dumps.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Thoughts on potential applications? I'm particularly interested in how this could improve AI language models' factual accuracy. Drop your ideas in the comments!"", 'raw': ""Thoughts on potential applications? I'm particularly interested in how this could improve AI language models' factual accuracy. Drop your ideas in the comments!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'wikimedia/structured-wikipedia'}, 'url': 'https://huggingface.co/datasets/wikimedia/structured-wikipedia', 'raw': 'https://huggingface.co/datasets/wikimedia/structured-wikipedia'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AI #OpenData #Wikipedia #MachineLearning', 'raw': '#AI #OpenData #Wikipedia #MachineLearning'}]","Wikimedia Enterprise just dropped full English & French Wikipedia on Hugging Face as structured JSON 🤯 + +Key points: +1. Parsed articles ready for machine learning pipelines +2. Perfect for AI model development - from pre-training to RAG +3. Includes metadata, Wikidata links, and content scores +4. Licensed under GFDL and CC BY-SA 4.0 (some content may have additional terms) + +I've been testing it, and it's a game-changer. The structured format is like a supercharged version of raw Wiki dumps. + +Thoughts on potential applications? I'm particularly interested in how this could improve AI language models' factual accuracy. Drop your ideas in the comments! 
+ +Dataset: https://huggingface.co/datasets/wikimedia/structured-wikipedia + +#AI #OpenData #Wikipedia #MachineLearning",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-20 14:18:29,2024-09-23 08:02:30.724,"[{'_id': '648f7e687fd64c00e21a35bd', 'avatarUrl': '/avatars/5cbfa6cbde933503bbc3577cf713e7b5.svg', 'fullname': 'Friedrich Marty', 'name': 'Smorty100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/fdaudens/517936806772862,323,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png,2266.0,Tom Aarsen,tomaarsen,546597993599669,"[{'type': 'text', 'value': ""I've just shipped the Sentence Transformers v3.1.1 patch release, fixing the hard negatives mining utility for some models. This utility is extremely useful to get more performance out of your embedding training data."", 'raw': ""I've just shipped the Sentence Transformers v3.1.1 patch release, fixing the hard negatives mining utility for some models. This utility is extremely useful to get more performance out of your embedding training data.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""⛏ Hard negatives are texts that are rather similar to some anchor text (e.g. a query), but are not the correct match. They're difficult for a model to distinguish from the correct answer, often resulting in a stronger model after training."", 'raw': ""⛏ Hard negatives are texts that are rather similar to some anchor text (e.g. a query), but are not the correct match. They're difficult for a model to distinguish from the correct answer, often resulting in a stronger model after training.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'inline_code', 'code': 'mine_hard_negatives', 'raw': '`mine_hard_negatives`'}, {'type': 'text', 'value': ' docs: ', 'raw': ' docs: '}, {'type': 'link', 'href': 'https://sbert.net/docs/package_reference/util.html#sentence_transformers.util.mine_hard_negatives', 'raw': 'https://sbert.net/docs/package_reference/util.html#sentence_transformers.util.mine_hard_negatives'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔓 Beyond that, this release removes the numpy<2 restriction from v3.1.0. This was previously required for Windows as not all third-party libraries were updated to support numpy v2. With Sentence Transformers, you can now choose v1 or v2 of numpy.', 'raw': '🔓 Beyond that, this release removes the numpy<2 restriction from v3.1.0. This was previously required for Windows as not all third-party libraries were updated to support numpy v2. 
With Sentence Transformers, you can now choose v1 or v2 of numpy.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the full release notes here: ', 'raw': 'Check out the full release notes here: '}, {'type': 'link', 'href': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v3.1.1', 'raw': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v3.1.1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm looking forward to releasing v3.2, I have some exciting things planned 🚀"", 'raw': ""I'm looking forward to releasing v3.2, I have some exciting things planned 🚀""}]","I've just shipped the Sentence Transformers v3.1.1 patch release, fixing the hard negatives mining utility for some models. This utility is extremely useful to get more performance out of your embedding training data. + +⛏ Hard negatives are texts that are rather similar to some anchor text (e.g. a query), but are not the correct match. They're difficult for a model to distinguish from the correct answer, often resulting in a stronger model after training. +`mine_hard_negatives` docs: https://sbert.net/docs/package_reference/util.html#sentence_transformers.util.mine_hard_negatives + +🔓 Beyond that, this release removes the numpy<2 restriction from v3.1.0. This was previously required for Windows as not all third-party libraries were updated to support numpy v2. With Sentence Transformers, you can now choose v1 or v2 of numpy. + +Check out the full release notes here: https://github.com/UKPLab/sentence-transformers/releases/tag/v3.1.1 + +I'm looking forward to releasing v3.2, I have some exciting things planned 🚀","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/KgXMH6y7FMdwrtLUOwgCI.png'}]",[],"[{'reaction': '🔥', 'users': ['cschroeder', 'not-lain', 'inflatebot', 'jaisanrobert', 'louisbrulenaudet', 'muhtasham'], 'count': 6}, {'reaction': '👍', 'users': ['sugatoray', 'not-lain'], 'count': 2}, {'reaction': '🤯', 'users': ['inflatebot', 'John6666'], 'count': 2}]",2024-09-20 14:15:36,2024-09-20 14:15:36.173,[],/posts/tomaarsen/546597993599669,2094,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1678038324479-noauth.jpeg,7.0,Empereur Pirate,Empereur-Pirate,133469785217479,"[{'type': 'text', 'value': 'The Double-Edged Sword of AI in Education: Navigating Ethical Challenges, Cognitive Development, and the Nature of Consciousness in the Age of Generative Technologies', 'raw': 'The Double-Edged Sword of AI in Education: Navigating Ethical Challenges, Cognitive Development, and the Nature of Consciousness in the Age of Generative Technologies'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://empereur-pirate.medium.com/the-double-edged-sword-of-ai-in-education-navigating-ethical-challenges-cognitive-development-2e71d5aca1d1', 'raw': 'https://empereur-pirate.medium.com/the-double-edged-sword-of-ai-in-education-navigating-ethical-challenges-cognitive-development-2e71d5aca1d1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The article ""The Double-Edged Sword of AI in Education"" delves into the ethical and psychological challenges of integrating AI into children\'s education. 
It highlights the risks posed by conversational agents designed for adults, which can expose children to inappropriate content and disrupt their cognitive development. Through role-playing games and linguistic interactions, these AI tools may allow children to bypass parental and educational boundaries, hindering their ability to learn independently.', 'raw': 'The article ""The Double-Edged Sword of AI in Education"" delves into the ethical and psychological challenges of integrating AI into children\'s education. It highlights the risks posed by conversational agents designed for adults, which can expose children to inappropriate content and disrupt their cognitive development. Through role-playing games and linguistic interactions, these AI tools may allow children to bypass parental and educational boundaries, hindering their ability to learn independently.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The article also emphasizes that AI can contribute to mental health issues by exposing young users to complex or unsuitable content, undermining educational and parental authority. Generative AI tools may foster psychological disorders by promoting access to imaginary realities that are not developmentally appropriate for children.', 'raw': 'The article also emphasizes that AI can contribute to mental health issues by exposing young users to complex or unsuitable content, undermining educational and parental authority. Generative AI tools may foster psychological disorders by promoting access to imaginary realities that are not developmentally appropriate for children.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Additionally, the article explores broader implications, particularly the confusion between simulated intelligence and true consciousness. While language models can mimic cognitive processes like ""situational awareness,"" they remain tools for processing information without actual emotional or mental consciousness. It\'s essential to differentiate this simulated situational efficiency from human consciousness, which involves complex emotional and psychic integration.', 'raw': 'Additionally, the article explores broader implications, particularly the confusion between simulated intelligence and true consciousness. While language models can mimic cognitive processes like ""situational awareness,"" they remain tools for processing information without actual emotional or mental consciousness. It\'s essential to differentiate this simulated situational efficiency from human consciousness, which involves complex emotional and psychic integration.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ultimately, the article calls for strict regulation and the creation of AI models specifically tailored to children, accounting for their cognitive and emotional maturity. As AI technologies advance, deeper reflection on the nature of intelligence and consciousness is necessary to protect children and ensure ethical, healthy learning environments.', 'raw': 'Ultimately, the article calls for strict regulation and the creation of AI models specifically tailored to children, accounting for their cognitive and emotional maturity. 
As AI technologies advance, deeper reflection on the nature of intelligence and consciousness is necessary to protect children and ensure ethical, healthy learning environments.'}]","The Double-Edged Sword of AI in Education: Navigating Ethical Challenges, Cognitive Development, and the Nature of Consciousness in the Age of Generative Technologies + +https://empereur-pirate.medium.com/the-double-edged-sword-of-ai-in-education-navigating-ethical-challenges-cognitive-development-2e71d5aca1d1 + +The article ""The Double-Edged Sword of AI in Education"" delves into the ethical and psychological challenges of integrating AI into children's education. It highlights the risks posed by conversational agents designed for adults, which can expose children to inappropriate content and disrupt their cognitive development. Through role-playing games and linguistic interactions, these AI tools may allow children to bypass parental and educational boundaries, hindering their ability to learn independently. + +The article also emphasizes that AI can contribute to mental health issues by exposing young users to complex or unsuitable content, undermining educational and parental authority. Generative AI tools may foster psychological disorders by promoting access to imaginary realities that are not developmentally appropriate for children. + +Additionally, the article explores broader implications, particularly the confusion between simulated intelligence and true consciousness. While language models can mimic cognitive processes like ""situational awareness,"" they remain tools for processing information without actual emotional or mental consciousness. It's essential to differentiate this simulated situational efficiency from human consciousness, which involves complex emotional and psychic integration. + +Ultimately, the article calls for strict regulation and the creation of AI models specifically tailored to children, accounting for their cognitive and emotional maturity. 
As AI technologies advance, deeper reflection on the nature of intelligence and consciousness is necessary to protect children and ensure ethical, healthy learning environments.",[],[],"[{'reaction': '🚀', 'users': ['Empereur-Pirate'], 'count': 1}]",2024-09-20 13:42:07,2024-09-20 13:42:52.363,[],/posts/Empereur-Pirate/133469785217479,244,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,139746681909382,"[{'type': 'text', 'value': 'Qwen2.5-72B + Flux-dev + FinePersonas = Grounded Structured Character Generator', 'raw': 'Qwen2.5-72B + Flux-dev + FinePersonas = Grounded Structured Character Generator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out my latest project that uses ', 'raw': 'Check out my latest project that uses '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Qwen/Qwen2.5-72B-Instruct'}, 'url': 'https://huggingface.co/Qwen/Qwen2.5-72B-Instruct', 'raw': 'https://huggingface.co/Qwen/Qwen2.5-72B-Instruct'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'black-forest-labs/FLUX.1-dev'}, 'url': 'https://huggingface.co/black-forest-labs/FLUX.1-dev', 'raw': 'https://huggingface.co/black-forest-labs/FLUX.1-dev'}, {'type': 'text', 'value': ' , and ', 'raw': ' , and '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'MohamedRashad/FinePersonas-Lite'}, 'url': 'https://huggingface.co/datasets/MohamedRashad/FinePersonas-Lite', 'raw': 'https://huggingface.co/datasets/MohamedRashad/FinePersonas-Lite'}, {'type': 'text', 'value': ' to generate different characters in a world of your description.', 'raw': ' to generate different characters in a world of your description.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try Here: ', 'raw': 'Try Here: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MohamedRashad/Character-Generator'}, 'url': 'https://huggingface.co/spaces/MohamedRashad/Character-Generator', 'raw': 'https://huggingface.co/spaces/MohamedRashad/Character-Generator'}, {'type': 'text', 'value': ' 🤗', 'raw': ' 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Qwen2.5-72B + Flux-dev + FinePersonas = Grounded Structured Character Generator + +Check out my latest project that uses https://huggingface.co/Qwen/Qwen2.5-72B-Instruct , https://huggingface.co/black-forest-labs/FLUX.1-dev , and https://huggingface.co/datasets/MohamedRashad/FinePersonas-Lite to generate different characters in a world of your description. 
+ +Try Here: https://huggingface.co/spaces/MohamedRashad/Character-Generator 🤗 + + ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6116d0584ef9fdfbf45dc4d9/ZRJ8X5POhtN60HL_3udCB.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6116d0584ef9fdfbf45dc4d9/5N0xFRwFvuX-Fz2X-RcHK.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'MohamedRashad'], 'count': 2}, {'reaction': '👍', 'users': ['whitebill', 'Krotos'], 'count': 2}]",2024-09-20 13:04:30,2024-09-20 13:06:40.953,[],/posts/MohamedRashad/139746681909382,1025,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,407961305075360,"[{'type': 'text', 'value': 'Anthropic just released a chunk improvement technique that vastly improves RAG performance! 🔥', 'raw': 'Anthropic just released a chunk improvement technique that vastly improves RAG performance! 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Crash reminder: Retrieval Augmented Generation (RAG) is a widely-used technique for improving your LLM chatbot's answers to user questions."", 'raw': ""Crash reminder: Retrieval Augmented Generation (RAG) is a widely-used technique for improving your LLM chatbot's answers to user questions.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It goes like this: instead of generating an LLM answer straight away, it just adds a previous step called Retrieval, which retrieves relevant documents from your knowledge base through semantic search, and just appends the top K documents to the prompt. ➡️ As a result, the LLM answer is grounded in context.', 'raw': 'It goes like this: instead of generating an LLM answer straight away, it just adds a previous step called Retrieval, which retrieves relevant documents from your knowledge base through semantic search, and just appends the top K documents to the prompt. ➡️ As a result, the LLM answer is grounded in context.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⛔️ The difficulty with this retrieval step is that when you split your documents into chunks that will be retrieved, you lose context. So important chunks could be missed.', 'raw': '⛔️ The difficulty with this retrieval step is that when you split your documents into chunks that will be retrieved, you lose context. So important chunks could be missed.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""💡 Anthropic's just-released blog post shows that you can add some context to each chunk, with one LLM call. Then you embed the original chunk + a bit of added context, so that the embedding is much more representative of the document in its context!"", 'raw': ""💡 Anthropic's just-released blog post shows that you can add some context to each chunk, with one LLM call. Then you embed the original chunk + a bit of added context, so that the embedding is much more representative of the document in its context!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🤔 Isn't that crazy expensive? Well, it would have been before, but not so much anymore with their new Prompt caching feature that makes duplicating thousands of requests with the same prompt much less expensive. 
They give an indicative price tag of only $1.02 per million chunks processed!"", 'raw': ""🤔 Isn't that crazy expensive? Well it would have been before, but not so much anymore with their new Prompt caching feature that makes duplicating thousands of requests with the same prompt much less expensive. They give an indicative price tag of only $1.02 per million chunks processed!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ And this vastly improves performance on their benchmark!', 'raw': '✅ And this vastly improves performance on their benchmark!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read their blog post 👉 ', 'raw': 'Read their blog post 👉 '}, {'type': 'link', 'href': 'https://www.anthropic.com/news/contextual-retrieval', 'raw': 'https://www.anthropic.com/news/contextual-retrieval'}]","Anthropic just released a chunk improvement technique that vastly improves RAG performance! 🔥 + +Crash reminder: Retrieval Augmented Generation (RAG) is a widely-used technique for improving your LLM chatbot's answers to user questions. + +It goes like this: instead of generating an LLM answer straight away, it just adds a previous step called Retrieval, that retrieves relevant documents from your knowledge base through semantic search, and just appends the top K documents to the prompt. ➡️ As a result, the LLM answer is grounded in context. + +⛔️ The difficulty with this retrieval step is that when you split your documents into chunks that will be retrieved, you lose context. So important chunks could be missed. + +💡 Anthropic's just-released blog post shows that you can add some context to each chunk, with one LLM call. Then you embed the original chunk + a bit of added context, so that the embedding is much more representative of the document in its context! + +🤔 Isn't that crazy expensive? Well it would have been before, but not so much anymore with their new Prompt caching feature that makes duplicating thousands of requests with the same prompt much less expensive. They give an indicative price tag of only $1.02 per million chunks processed! + +✅ And this vastly improves performance on their benchmark! 
+ +Read their blog post 👉 https://www.anthropic.com/news/contextual-retrieval","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/oz94TbsyNRw__nmJxsKGX.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'wsuff'], 'count': 2}]",2024-09-20 12:54:23,2024-09-20 12:54:23.885,[],/posts/m-ric/407961305075360,460,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg,334.0,Daniel Vila,dvilasuero,337858800657734,"[{'type': 'text', 'value': 'Explore FinePersonas, visually with Argilla and ', 'raw': 'Explore FinePersonas, visually with Argilla and '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'black-forest-labs/FLUX.1-schnell'}, 'url': 'https://huggingface.co/black-forest-labs/FLUX.1-schnell', 'raw': 'https://huggingface.co/black-forest-labs/FLUX.1-schnell'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Excited to share this space where the community can explore a tiny subset of FinePersonas', 'raw': 'Excited to share this space where the community can explore a tiny subset of FinePersonas'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'argilla/finepersonas'}, 'url': 'https://huggingface.co/spaces/argilla/finepersonas', 'raw': 'https://huggingface.co/spaces/argilla/finepersonas'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset built with distilabel and Free Serverless endpoints', 'raw': 'Dataset built with distilabel and Free Serverless endpoints'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is just a first step towards more interesting experiments with FinePersonas, for example can we use it to assess biases in text2image models?', 'raw': 'This is just a first step towards more interesting experiments with FinePersonas, for example can we use it to assess biases in text2image models?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you have ideas I'd love to hear them in the comments!"", 'raw': ""If you have ideas I'd love to hear them in the comments!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Explore FinePersonas, visually with Argilla and https://huggingface.co/black-forest-labs/FLUX.1-schnell + + +Excited to share this space where the community can explore a tiny subset of FinePersonas + +https://huggingface.co/spaces/argilla/finepersonas + + +Dataset built with distilabel and Free Serverless endpoints + +This is just a first step towards more interesting experiments with FinePersonas, for example can we use it to assess biases in text2image models? + +If you have ideas I'd love to hear them in the comments! 
+ +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60420dccc15e823a685f2b03/XgIbRyy4_6O0skCDjqXwL.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'alielfilali01'], 'count': 2}, {'reaction': '🔥', 'users': ['alielfilali01'], 'count': 1}, {'reaction': '❤️', 'users': ['alielfilali01'], 'count': 1}]",2024-09-20 12:35:54,2024-09-20 12:35:54.322,[],/posts/dvilasuero/337858800657734,425,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1655385361868-61b85ce86eb1f2c5e6233736.jpeg,930.0,Vaibhav Srivastav,reach-vb,570076486531546,"[{'type': 'text', 'value': 'Less than two days ago Kyutai Labs open sourced Moshi - an ~7.6B on-device Speech to Speech foundation model and Mimi - SoTA streaming speech codec! 🔥', 'raw': 'Less than two days ago Kyutai Labs open sourced Moshi - an ~7.6B on-device Speech to Speech foundation model and Mimi - SoTA streaming speech codec! 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The release includes:', 'raw': 'The release includes:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Moshiko & Moshika - Moshi finetuned on synthetic data (CC-BY license) (', 'raw': '1. Moshiko & Moshika - Moshi finetuned on synthetic data (CC-BY license) ('}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'kyutai/moshi-v01-release-66eaeaf3302bef6bd9ad7acd'}, 'url': 'https://huggingface.co/collections/kyutai/moshi-v01-release-66eaeaf3302bef6bd9ad7acd', 'raw': 'https://huggingface.co/collections/kyutai/moshi-v01-release-66eaeaf3302bef6bd9ad7acd'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Mimi - Streaiming Audio Codec, processes 24 kHz audio, down to a 12.5 Hz representation with a bandwidth of 1.1 kbps (CC-BY license) (', 'raw': '2. Mimi - Streaiming Audio Codec, processes 24 kHz audio, down to a 12.5 Hz representation with a bandwidth of 1.1 kbps (CC-BY license) ('}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'kyutai/mimi'}, 'url': 'https://huggingface.co/kyutai/mimi', 'raw': 'https://huggingface.co/kyutai/mimi'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Model checkpoints & Inference codebase written in Rust (Candle), PyTorch & MLX (Apache license) (', 'raw': '3. Model checkpoints & Inference codebase written in Rust (Candle), PyTorch & MLX (Apache license) ('}, {'type': 'link', 'href': 'https://github.com/kyutai-labs/moshi', 'raw': 'https://github.com/kyutai-labs/moshi'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How does Moshi work?', 'raw': 'How does Moshi work?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""1. Moshi processes two audio streams: one for itself and one for the user, with the user's stream coming from audio input and Moshi's stream generated by the model."", 'raw': ""1. Moshi processes two audio streams: one for itself and one for the user, with the user's stream coming from audio input and Moshi's stream generated by the model.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. 
Along with these audio streams, Moshi predicts text tokens for its speech, enhancing its generation quality.', 'raw': '2. Along with these audio streams, Moshi predicts text tokens for its speech, enhancing its generation quality.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. The model uses a small Depth Transformer for codebook dependencies and a large 7B parameter Temporal Transformer for temporal dependencies.', 'raw': '3. The model uses a small Depth Transformer for codebook dependencies and a large 7B parameter Temporal Transformer for temporal dependencies.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. The theoretical latency is 160ms, with a practical latency of around 200ms on an L4 GPU.', 'raw': '4. The theoretical latency is 160ms, with a practical latency of around 200ms on an L4 GPU.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model size & inference:', 'raw': 'Model size & inference:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Moshiko/ka are 7.69B param models', 'raw': 'Moshiko/ka are 7.69B param models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'bf16 ~16GB VRAM', 'raw': 'bf16 ~16GB VRAM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '8-bit ~8GB VRAM', 'raw': '8-bit ~8GB VRAM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4-bit ~4GB VRAM', 'raw': '4-bit ~4GB VRAM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can run inference via Candle 🦀, PyTorch and MLX - based on your hardware.', 'raw': 'You can run inference via Candle 🦀, PyTorch and MLX - based on your hardware.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Kyutai team, ', 'raw': 'The Kyutai team, '}, {'type': 'mention', 'user': 'adefossez', 'raw': '@adefossez'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'lmz', 'raw': '@lmz'}, {'type': 'text', 'value': "" and team are cracked AF, they're bringing some serious firepower to the open source/ science AI scene, looking forward to what's next! 🐐"", 'raw': "" and team are cracked AF, they're bringing some serious firepower to the open source/ science AI scene, looking forward to what's next! 🐐""}]","Less than two days ago Kyutai Labs open sourced Moshi - an ~7.6B on-device Speech to Speech foundation model and Mimi - SoTA streaming speech codec! 🔥 + +The release includes: + +1. Moshiko & Moshika - Moshi finetuned on synthetic data (CC-BY license) (https://huggingface.co/collections/kyutai/moshi-v01-release-66eaeaf3302bef6bd9ad7acd) +2. Mimi - Streaming Audio Codec, processes 24 kHz audio, down to a 12.5 Hz representation with a bandwidth of 1.1 kbps (CC-BY license) (https://huggingface.co/kyutai/mimi) +3. Model checkpoints & Inference codebase written in Rust (Candle), PyTorch & MLX (Apache license) (https://github.com/kyutai-labs/moshi) + +How does Moshi work? + +1. Moshi processes two audio streams: one for itself and one for the user, with the user's stream coming from audio input and Moshi's stream generated by the model. + +2. Along with these audio streams, Moshi predicts text tokens for its speech, enhancing its generation quality. + +3. 
The model uses a small Depth Transformer for codebook dependencies and a large 7B parameter Temporal Transformer for temporal dependencies. + +4. The theoretical latency is 160ms, with a practical latency of around 200ms on an L4 GPU. + +Model size & inference: + +Moshiko/ka are 7.69B param models + +bf16 ~16GB VRAM +8-bit ~8GB VRAM +4-bit ~4GB VRAM + +You can run inference via Candle 🦀, PyTorch and MLX - based on your hardware. + +The Kyutai team, @adefossez @lmz and team are cracked AF, they're bringing some serious firepower to the open source/ science AI scene, looking forward to what's next! 🐐","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61b85ce86eb1f2c5e6233736/4AvxTcEapDvezIdUJA5mi.png'}]","[{'_id': '6357f5f50dce5559211a3503', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1666708948380-noauth.jpeg', 'fullname': 'Alexandre Défossez', 'name': 'adefossez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29}, {'_id': '6355a3c1805be5a8f30fea49', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6355a3c1805be5a8f30fea49/ONMEctCWAeAgF2eZ307si.jpeg', 'fullname': 'Laurent Mazare', 'name': 'lmz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 108}]","[{'reaction': '🔥', 'users': ['lmz', 'AtAndDev', 'damerajee', 'den0620', 'adorkin', 'Forbu14', 'abdullah', 'Nelathan', 'osanseviero', 'Dunateo', 'bitsoko'], 'count': 11}, {'reaction': '🧠', 'users': ['MohamedRashad', 'John6666', 'fdaudens', 'AtAndDev', 'damerajee', 'bitsoko'], 'count': 6}]",2024-09-20 12:04:17,2024-09-20 17:59:13.540,"[{'_id': '6487239cca30096ea9f52115', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6487239cca30096ea9f52115/HMte9wjKJgfcxsO-5vb_Q.jpeg', 'fullname': 'dame rajee', 'name': 'damerajee', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 17, 'isFollowing': False}]",/posts/reach-vb/570076486531546,2969,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1606406298765-noauth.jpeg,368.0,Albert Villanova del Moral,albertvillanova,962669244664239,"[{'type': 'text', 'value': 'Check out the new Structured #Wikipedia dataset by Wikimedia Enterprise: abstract, infobox, structured sections, main image,... ', 'raw': 'Check out the new Structured #Wikipedia dataset by Wikimedia Enterprise: abstract, infobox, structured sections, main image,... '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Currently in early beta (English & French). Explore it and give feedback: ', 'raw': 'Currently in early beta (English & French). 
Explore it and give feedback: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'wikimedia/structured-wikipedia'}, 'url': 'https://huggingface.co/datasets/wikimedia/structured-wikipedia', 'raw': 'https://huggingface.co/datasets/wikimedia/structured-wikipedia'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More info: ', 'raw': 'More info: '}, {'type': 'link', 'href': 'https://enterprise.wikimedia.com/blog/hugging-face-dataset/', 'raw': 'https://enterprise.wikimedia.com/blog/hugging-face-dataset/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'sdelbecque', 'raw': '@sdelbecque'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'resquito-wmf', 'raw': '@resquito-wmf'}]","Check out the new Structured #Wikipedia dataset by Wikimedia Enterprise: abstract, infobox, structured sections, main image,... + +Currently in early beta (English & French). Explore it and give feedback: https://huggingface.co/datasets/wikimedia/structured-wikipedia + +More info: https://enterprise.wikimedia.com/blog/hugging-face-dataset/ +@sdelbecque @resquito-wmf",[],"[{'_id': '66eb1252d268fda0deb75805', 'avatarUrl': '/avatars/3f3b43b44636bd9363542d640619bea5.svg', 'fullname': 'Ricardo Esquito', 'name': 'resquito-wmf', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}, {'_id': '66c323c7466dc6770e932ed9', 'avatarUrl': '/avatars/2d68ee0d5d2df57df1856c29cdd1f2a8.svg', 'fullname': 'Stephanie Delbecque', 'name': 'sdelbecque', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '👍', 'users': ['resquito-wmf', 'davanstrien', 'not-lain', 'ayymen'], 'count': 4}, {'reaction': '❤️', 'users': ['davanstrien', 'sdelbecque'], 'count': 2}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-20 06:30:38,2024-09-20 06:41:07.090,[],/posts/albertvillanova/962669244664239,1560,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/658880d499ed106ac888dd7a/wMv9-ZsJUw4QQnld_cci7.jpeg,34.0,Alexander Dylan Bodner,AlexBodner,556393617959279,"[{'type': 'text', 'value': '💾🧠How much VRAM will you need for training your AI model? 💾🧠', 'raw': '💾🧠How much VRAM will you need for training your AI model? 💾🧠'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out this app where you convert:', 'raw': 'Check out this app where you convert:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Pytorch/tensorflow summary -> required VRAM', 'raw': 'Pytorch/tensorflow summary -> required VRAM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'or', 'raw': 'or'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Parameter count -> required VRAM', 'raw': 'Parameter count -> required VRAM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Use it in: ', 'raw': 'Use it in: '}, {'type': 'link', 'href': 'http://howmuchvram.com', 'raw': 'http://howmuchvram.com'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And everything is open source! Ask for new functionalities or contribute in:', 'raw': 'And everything is open source! 
Ask for new functionalities or contribute in:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/AlexBodner/How_Much_VRAM', 'raw': 'https://github.com/AlexBodner/How_Much_VRAM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If it's useful to you leave a star 🌟 and share it with someone who will find the tool useful!"", 'raw': ""If it's useful to you leave a star 🌟 and share it with someone who will find the tool useful!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More discussion in: ', 'raw': 'More discussion in: '}, {'type': 'link', 'href': 'https://x.com/AlexBodner_/status/1832054850294812679', 'raw': 'https://x.com/AlexBodner_/status/1832054850294812679'}, {'type': 'new_line', 'raw': '\n'}]","💾🧠How much VRAM will you need for training your AI model? 💾🧠 +Check out this app where you convert: +Pytorch/tensorflow summary -> required VRAM +or +Parameter count -> required VRAM + +Use it in: http://howmuchvram.com + +And everything is open source! Ask for new functionalities or contribute in: +https://github.com/AlexBodner/How_Much_VRAM +If it's useful to you leave a star 🌟 and share it with someone who will find the tool useful! +More discussion in: https://x.com/AlexBodner_/status/1832054850294812679 +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/658880d499ed106ac888dd7a/eTDAzjA0Lnl15Zhd9luvU.mp4'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'adhiraj135', 'taher30', 'arthur-gtgn', 'Augustable', 'den0620'], 'count': 6}]",2024-09-15 20:32:09,2024-09-15 20:32:09.368,[],/posts/AlexBodner/556393617959279,1630,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/669dbd709a4bf63e08f1ddc2/aV10ZJPPzH5LbnHFZNqc7.png,159.0,Yi Cui,onekq,274940979057996,"[{'type': 'text', 'value': '🐋 DeepSeek 🐋2.5 is hands-down the best open-source model, leaving its peers way behind. It even beats GPT-4o mini.', 'raw': '🐋 DeepSeek 🐋2.5 is hands-down the best open-source model, leaving its peers way behind. It even beats GPT-4o mini.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'onekq-ai/WebApp1K-models-leaderboard'}, 'url': 'https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard', 'raw': 'https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The inference of the official API is painfully slow though. I heard the team is short on GPUs (well, who isn't)."", 'raw': ""The inference of the official API is painfully slow though. I heard the team is short on GPUs (well, who isn't).""}, {'type': 'new_line', 'raw': '\n'}]","🐋 DeepSeek 🐋2.5 is hands-down the best open-source model, leaving its peers way behind. It even beats GPT-4o mini. + +https://huggingface.co/spaces/onekq-ai/WebApp1K-models-leaderboard + +The inference of the official API is painfully slow though. I heard the team is short on GPUs (well, who isn't). 
+ +",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-15 17:29:35,2024-09-15 17:29:35.267,[],/posts/onekq/274940979057996,558,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/667c1a5acb6800a191024eb9/AqL8mQZsZjpZKi9FxtkIH.png,51.0,Ezgi Korkmaz,ezgikorkmaz,885982803573359,"[{'type': 'text', 'value': 'If you are interested in adversarial machine learning and AI safety, find my curated reading list below: ', 'raw': 'If you are interested in adversarial machine learning and AI safety, find my curated reading list below: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub: ', 'raw': 'GitHub: '}, {'type': 'link', 'href': 'https://github.com/EzgiKorkmaz/adversarial-reinforcement-learning', 'raw': 'https://github.com/EzgiKorkmaz/adversarial-reinforcement-learning'}]","If you are interested in adversarial machine learning and AI safety, find my curated reading list below: + +GitHub: https://github.com/EzgiKorkmaz/adversarial-reinforcement-learning",[],[],"[{'reaction': '👀', 'users': ['John6666', 'monsoon-nlp', 'runebloodstone', 'rreed-pha'], 'count': 4}, {'reaction': '👍', 'users': ['adamlogman', 'ajibawa-2023'], 'count': 2}, {'reaction': '🚀', 'users': ['ezgikorkmaz'], 'count': 1}]",2024-09-15 14:48:49,2024-09-15 14:48:49.605,[],/posts/ezgikorkmaz/885982803573359,1964,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,249664533919951,"[{'type': 'text', 'value': 'nanoGPT with Sigmoid Self-Attention', 'raw': 'nanoGPT with Sigmoid Self-Attention'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I couldn’t resist, had to give it a try:)', 'raw': 'I couldn’t resist, had to give it a try:)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Some observations on M2:', 'raw': 'Some observations on M2:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SSA was ~5-10% faster in training with similar final loss values, slightly less coherent text generation, marginally higher perplexity, and lower memory usage compared to softmax.', 'raw': 'SSA was ~5-10% faster in training with similar final loss values, slightly less coherent text generation, marginally higher perplexity, and lower memory usage compared to softmax.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'link', 'href': 'https://github.com/Jaykef/ai-algorithms/blob/main/sigmoid_attn.ipynb', 'raw': 'https://github.com/Jaykef/ai-algorithms/blob/main/sigmoid_attn.ipynb'}]","nanoGPT with Sigmoid Self-Attention +I couldn’t resist, had to give it a try:) + +Some observations on M2: +SSA was ~5-10% faster in training with similar final loss values, slightly less coherent text generation, marginally higher perplexity, and lower memory usage compared to softmax. 
+ +Code: https://github.com/Jaykef/ai-algorithms/blob/main/sigmoid_attn.ipynb","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/sFe2P9eWC9O-UVqlTNEai.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/tIh-GbkgerH7q6jWdfmeV.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/EjYbcrrdkTJEeyxXwfOUt.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/1QUsh75UmPCYmOXVTUoWg.jpeg'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'prince-canuma', 'KingNish', 'louisbrulenaudet'], 'count': 4}, {'reaction': '🔥', 'users': ['prince-canuma', 'mtasic85'], 'count': 2}]",2024-09-15 12:45:07,2024-09-16 01:15:26.890,[],/posts/Jaward/249664533919951,1500,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,847379206933921,"[{'type': 'text', 'value': 'Researchers from Tencent have developed DepthCrafter, a novel method for generating temporally consistent long depth sequences for open-world videos using video diffusion models.', 'raw': 'Researchers from Tencent have developed DepthCrafter, a novel method for generating temporally consistent long depth sequences for open-world videos using video diffusion models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It leverages a pre-trained image-to-video diffusion model (SVD) as the foundation and uses a 3-stage training strategy on paired video-depth datasets:', 'raw': 'It leverages a pre-trained image-to-video diffusion model (SVD) as the foundation and uses a 3-stage training strategy on paired video-depth datasets:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Train on a large realistic dataset (1-25 frames)', 'raw': '1. Train on a large realistic dataset (1-25 frames)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Fine-tune temporal layers on realistic data (1-110 frames)', 'raw': '2. Fine-tune temporal layers on realistic data (1-110 frames)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Fine-tune spatial layers on synthetic data (45 frames)', 'raw': '3. Fine-tune spatial layers on synthetic data (45 frames)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It adapts SVD's conditioning mechanism for frame-by-frame video input and employs latent diffusion in VAE space for efficiency. "", 'raw': ""It adapts SVD's conditioning mechanism for frame-by-frame video input and employs latent diffusion in VAE space for efficiency. 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sprinkle some intelligent inference strategy for extremely long videos:', 'raw': 'Sprinkle some intelligent inference strategy for extremely long videos:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Segment-wise processing (up to 110 frames)', 'raw': '- Segment-wise processing (up to 110 frames)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Noise initialization to anchor depth distributions', 'raw': '- Noise initialization to anchor depth distributions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Latent interpolation for seamless stitching', 'raw': '- Latent interpolation for seamless stitching'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And outperforms SOTA methods on multiple datasets (Sintel, ScanNet, KITTI, Bonn).', 'raw': 'And outperforms SOTA methods on multiple datasets (Sintel, ScanNet, KITTI, Bonn).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read here: ', 'raw': 'Read here: '}, {'type': 'link', 'href': 'https://depthcrafter.github.io', 'raw': 'https://depthcrafter.github.io'}]","Researchers from Tencent have developed DepthCrafter, a novel method for generating temporally consistent long depth sequences for open-world videos using video diffusion models. + +It leverages a pre-trained image-to-video diffusion model (SVD) as the foundation and uses a 3-stage training strategy on paired video-depth datasets: +1. Train on a large realistic dataset (1-25 frames) +2. Fine-tune temporal layers on realistic data (1-110 frames) +3. Fine-tune spatial layers on synthetic data (45 frames) + +It adapts SVD's conditioning mechanism for frame-by-frame video input and employs latent diffusion in VAE space for efficiency. +Sprinkle some intelligent inference strategy for extremely long videos: +- Segment-wise processing (up to 110 frames) +- Noise initialization to anchor depth distributions +- Latent interpolation for seamless stitching + +And outperforms SOTA methods on multiple datasets (Sintel, ScanNet, KITTI, Bonn). 
+ +Read here: https://depthcrafter.github.io","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/gUwp20LtukkS_XDPXRoR0.qt'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-15 10:54:06,2024-09-15 10:54:06.736,[],/posts/singhsidhukuldeep/847379206933921,505,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,590548810409525,"[{'type': 'text', 'value': 'Trained Myself With 256 Images on FLUX — Results Mind Blowing', 'raw': 'Trained Myself With 256 Images on FLUX — Results Mind Blowing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Detailed Full Workflow', 'raw': 'Detailed Full Workflow'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medium article : ', 'raw': 'Medium article : '}, {'type': 'link', 'href': 'https://medium.com/@furkangozukara/ultimate-flux-lora-training-tutorial-windows-and-cloud-deployment-abb72f21cbf8', 'raw': 'https://medium.com/@furkangozukara/ultimate-flux-lora-training-tutorial-windows-and-cloud-deployment-abb72f21cbf8'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Windows main tutorial : ', 'raw': 'Windows main tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/nySGu12Y05k', 'raw': 'https://youtu.be/nySGu12Y05k'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cloud tutorial for GPU poor or scaling : ', 'raw': 'Cloud tutorial for GPU poor or scaling : '}, {'type': 'link', 'href': 'https://youtu.be/-uhL2nW7Ddw', 'raw': 'https://youtu.be/-uhL2nW7Ddw'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full detailed results and conclusions : ', 'raw': 'Full detailed results and conclusions : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/111891669', 'raw': 'https://www.patreon.com/posts/111891669'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full config files and details to train : ', 'raw': 'Full config files and details to train : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/110879657', 'raw': 'https://www.patreon.com/posts/110879657'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SUPIR Upscaling (default settings are now perfect) : ', 'raw': 'SUPIR Upscaling (default settings are now perfect) : '}, {'type': 'link', 'href': 'https://youtu.be/OYxVEvDf284', 'raw': 'https://youtu.be/OYxVEvDf284'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I used my Poco X6 Camera phone and solo taken images', 'raw': 'I used my Poco X6 Camera phone and solo taken images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My dataset is far from being ready, thus I have used so many repeating and almost same images, but this was rather experimental', 'raw': 'My dataset is far from being ready, thus I have used so many repeating and almost same images, but this was rather experimental'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hopefully I will continue taking more shots and improve dataset and reduce size in future', 'raw': 'Hopefully I will continue taking more shots and 
improve dataset and reduce size in future'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I trained Clip-L and T5-XXL Text Encoders as well', 'raw': 'I trained Clip-L and T5-XXL Text Encoders as well'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Since there was too much push from community that my workflow won’t work with expressions, I had to take a break from research and use whatever I have', 'raw': 'Since there was too much push from community that my workflow won’t work with expressions, I had to take a break from research and use whatever I have'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I used my own researched workflow for training with Kohya GUI and also my own self developed SUPIR app batch upscaling with face upscaling and auto LLaVA captioning improvement', 'raw': 'I used my own researched workflow for training with Kohya GUI and also my own self developed SUPIR app batch upscaling with face upscaling and auto LLaVA captioning improvement'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Download images to see them in full size, the last provided grid is 50% downscaled', 'raw': 'Download images to see them in full size, the last provided grid is 50% downscaled'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Workflow', 'raw': 'Workflow'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Gather a dataset that has expressions and perspectives that you like after training, this is crucial, whatever you add, it can generate perfect', 'raw': 'Gather a dataset that has expressions and perspectives that you like after training, this is crucial, whatever you add, it can generate perfect'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow one of the LoRA training tutorials / guides', 'raw': 'Follow one of the LoRA training tutorials / guides'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After training your LoRA, use your favorite UI to generate images', 'raw': 'After training your LoRA, use your favorite UI to generate images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I prefer SwarmUI and here used prompts (you can add specific expressions to prompts) including face inpainting : ', 'raw': 'I prefer SwarmUI and here used prompts (you can add specific expressions to prompts) including face inpainting : '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://gist.github.com/FurkanGozukara/ce72861e52806c5ea4e8b9c7f4409672', 'raw': 'https://gist.github.com/FurkanGozukara/ce72861e52806c5ea4e8b9c7f4409672'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After generating images, use SUPIR to upscale 2x with maximum resemblance', 'raw': 'After generating images, use SUPIR to upscale 2x with maximum resemblance'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Short Conclusions', 'raw': 'Short Conclusions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Using 256 images certainly caused more overfitting than necessary', 'raw': 
'Using 256 images certainly caused more overfitting than necessary'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '...', 'raw': '...'}]","Trained Myself With 256 Images on FLUX — Results Mind Blowing + +Detailed Full Workflow + +Medium article : https://medium.com/@furkangozukara/ultimate-flux-lora-training-tutorial-windows-and-cloud-deployment-abb72f21cbf8 + +Windows main tutorial : https://youtu.be/nySGu12Y05k + +Cloud tutorial for GPU poor or scaling : https://youtu.be/-uhL2nW7Ddw + +Full detailed results and conclusions : https://www.patreon.com/posts/111891669 + +Full config files and details to train : https://www.patreon.com/posts/110879657 + +SUPIR Upscaling (default settings are now perfect) : https://youtu.be/OYxVEvDf284 + +I used my Poco X6 Camera phone and solo taken images + +My dataset is far from being ready, thus I have used so many repeating and almost same images, but this was rather experimental + +Hopefully I will continue taking more shots and improve dataset and reduce size in future + +I trained Clip-L and T5-XXL Text Encoders as well + +Since there was too much push from community that my workflow won’t work with expressions, I had to take a break from research and use whatever I have + +I used my own researched workflow for training with Kohya GUI and also my own self developed SUPIR app batch upscaling with face upscaling and auto LLaVA captioning improvement + +Download images to see them in full size, the last provided grid is 50% downscaled + +Workflow + +Gather a dataset that has expressions and perspectives that you like after training, this is crucial, whatever you add, it can generate perfect + +Follow one of the LoRA training tutorials / guides + +After training your LoRA, use your favorite UI to generate images + +I prefer SwarmUI and here used prompts (you can add specific expressions to prompts) including face inpainting : + +https://gist.github.com/FurkanGozukara/ce72861e52806c5ea4e8b9c7f4409672 + +After generating images, use SUPIR to upscale 2x with maximum resemblance + +Short Conclusions + +Using 256 images certainly caused more overfitting than necessary + +...","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/sUBwos7vWfgT2BDmHIc6X.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/-D_20d-3rKJH0F2nAORjx.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/-9jMPBAHU-WaJvjpAlRfM.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/FzdS5AnS2e2bhO2tz9U3c.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/_4xFc66MQhskP3l7rcDly.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Q34ZwsAIpHsoPfXJ4U4jn.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/NExR-YyvIY_HiN9vaiNxU.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/ei8mTIVC5CkkFLDliybpb.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/_y_mhuuvGUxJV8ACCwgu3.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/3U7n6qwSzL3F8ZoICVp6V.png'}, {'type': 'image', 
'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/NCt4TcnEVremvVuLfMB1E.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/XGasJqomZlVuXpoAxVhJ5.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/OB4UJy1r43F6fusiHa9Ph.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/qxbPUlTTCFxV5gerHvR3X.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/uHky7TLwyUyRwGaMALJ4v.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/2-S8qOkCfobdXmRaT8aiG.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/TvgXP1nOyHAlePZQsonFe.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/o1EGKh-2MvAxgnDXdM1R_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Dhm54M6urGJHW5lRPVIyh.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Lry08XQCLKEhdtTXhCZF9.png'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'Kukedlc', 'nvhf', 'beratcmn', 'AIGUYCONTENT', 'kosalvann', 'AeonFutura', 'josephcidc', 'victor', 'mtasic85', 'JaayTee', 'BartekG'], 'count': 12}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'nickandbro', 'tail-call', 'boapps'], 'count': 4}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'Ashtonhashemi', 'Obenlia', 'wisam84'], 'count': 4}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'John6666', 'tazztone'], 'count': 3}, {'reaction': '😎', 'users': ['MonsterMMORPG', 'louisbrulenaudet'], 'count': 2}, {'reaction': '🤯', 'users': ['MonsterMMORPG', 'AeonFutura'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}]",2024-09-14 23:21:32,2024-09-14 23:21:32.695,[],/posts/MonsterMMORPG/590548810409525,4226,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f3fe13d79c1ba4c353d0c19/XswyGe3OtOdZ6g7rnrgfc.png,290.0,Ankit Pal,aaditya,828861715602513,"[{'type': 'text', 'value': 'Last Week in Medical AI: Top Research ', 'raw': 'Last Week in Medical AI: Top Research '}, {'type': 'text', 'raw': 'Papers/Models', 'value': 'Papers/Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏅(September 7 - September 14, 2024)', 'raw': '🏅(September 7 - September 14, 2024)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏅 Medical AI Paper of the week', 'raw': '🏅 Medical AI Paper of the week'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Chai-1 Foundation model molecular structure prediction ', 'raw': 'Chai-1 Foundation model molecular structure prediction '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLMs & Benchmarks', 'raw': 'Medical LLMs & Benchmarks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- BrainWave: A Brain Signal Foundation Model', 'raw': '- BrainWave: A Brain Signal Foundation Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 
'text', 'value': '- DS-ViT: Vision Transformer for Alzheimer’s Diagnosis', 'raw': '- DS-ViT: Vision Transformer for Alzheimer’s Diagnosis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- EyeCLIP: Visual–language model for ophthalmic', 'raw': '- EyeCLIP: Visual–language model for ophthalmic'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Segment Anything Model for Tumor Segmentation', 'raw': '- Segment Anything Model for Tumor Segmentation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MEDIC: Evaluating LLMs in Clinical Applications', 'raw': '- MEDIC: Evaluating LLMs in Clinical Applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medical LLM Applications', 'raw': 'Medical LLM Applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- KARGEN: Radiology Report Generation LLMs', 'raw': '- KARGEN: Radiology Report Generation LLMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- DrugAgent: Explainable Drug Repurposing Agents', 'raw': '- DrugAgent: Explainable Drug Repurposing Agents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Improving RAG in Medicine with Follow-up Questions', 'raw': '- Improving RAG in Medicine with Follow-up Questions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Frameworks and Methodologies', 'raw': 'Frameworks and Methodologies'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Infrastructure for Automatic Cell Segmentation', 'raw': '- Infrastructure for Automatic Cell Segmentation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Data Alignment for Dermatology AI', 'raw': '- Data Alignment for Dermatology AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Diagnostic Reasoning in Natural Language', 'raw': '- Diagnostic Reasoning in Natural Language'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Two-Stage Instruction Fine-tuning Approach for Med', 'raw': '- Two-Stage Instruction Fine-tuning Approach for Med'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI in Healthcare Ethics ', 'raw': 'AI in Healthcare Ethics '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Concerns and Choices of Using LLMs for Healthcare ', 'raw': '- Concerns and Choices of Using LLMs for Healthcare '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Understanding Fairness in Recommender Systems', 'raw': '- Understanding Fairness in Recommender Systems'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Towards Fairer Health Recommendations', 'raw': '- Towards Fairer Health Recommendations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check the full thread: ', 'raw': 'Check the full thread: '}, {'type': 'link', 'href': 'https://x.com/OpenlifesciAI/status/1832476252260712788', 'raw': 'https://x.com/OpenlifesciAI/status/1832476252260712788'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thank you for your continued support and love for this series! Stay up-to-date with weekly updates on Medical LLMs, datasets, and top research papers by following ', 'raw': 'Thank you for your continued support and love for this series! 
Stay up-to-date with weekly updates on Medical LLMs, datasets, and top research papers by following '}, {'type': 'mention', 'user': 'aaditya', 'raw': '@aaditya'}, {'type': 'text', 'value': ' 🤗', 'raw': ' 🤗'}]","Last Week in Medical AI: Top Research Papers/Models +🏅(September 7 - September 14, 2024) + +🏅 Medical AI Paper of the week +Chai-1 Foundation model molecular structure prediction + +Medical LLMs & Benchmarks +- BrainWave: A Brain Signal Foundation Model +- DS-ViT: Vision Transformer for Alzheimer’s Diagnosis +- EyeCLIP: Visual–language model for ophthalmic +- Segment Anything Model for Tumor Segmentation +- MEDIC: Evaluating LLMs in Clinical Applications + +Medical LLM Applications +- KARGEN: Radiology Report Generation LLMs +- DrugAgent: Explainable Drug Repurposing Agents +- Improving RAG in Medicine with Follow-up Questions + +Frameworks and Methodologies +- Infrastructure for Automatic Cell Segmentation +- Data Alignment for Dermatology AI +- Diagnostic Reasoning in Natural Language +- Two-Stage Instruction Fine-tuning Approach for Med + +AI in Healthcare Ethics +- Concerns and Choices of Using LLMs for Healthcare +- Understanding Fairness in Recommender Systems +- Towards Fairer Health Recommendations + +Check the full thread: https://x.com/OpenlifesciAI/status/1832476252260712788 + +Thank you for your continued support and love for this series! Stay up-to-date with weekly updates on Medical LLMs, datasets, and top research papers by following @aaditya 🤗","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f3fe13d79c1ba4c353d0c19/njB7xQ0-YXOviCClYQ3Ns.jpeg'}]","[{'_id': '5f3fe13d79c1ba4c353d0c19', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f3fe13d79c1ba4c353d0c19/XswyGe3OtOdZ6g7rnrgfc.png', 'fullname': 'Ankit Pal', 'name': 'aaditya', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 290}]","[{'reaction': '👍', 'users': ['aaditya', 'onekq', 'cchristophe', 'sasikiran', 'blanchon', 'Chunte', 'shetumohanto', 'ajibawa-2023'], 'count': 8}, {'reaction': '❤️', 'users': ['aaditya', 'Svngoku', 'shetumohanto'], 'count': 3}, {'reaction': '🚀', 'users': ['aaditya', 'John6666', 'hibana2077'], 'count': 3}, {'reaction': '🔥', 'users': ['aaditya', 'Kukedlc', 'blanchon'], 'count': 3}, {'reaction': '🤗', 'users': ['aaditya'], 'count': 1}]",2024-09-14 23:09:17,2024-09-14 23:09:17.036,[],/posts/aaditya/828861715602513,2568,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6685d39f64da708c0f553c5d/KEw8NG84_vxEH6xhu-4fu.png,138.0,Bot,inflatebot,238083443023699,"[{'type': 'text', 'value': 'Anybody ever play Final Fantasy: Crystal Chronicles?', 'raw': 'Anybody ever play Final Fantasy: Crystal Chronicles?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Like, *really* play it?', 'raw': 'Like, *really* play it?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mag Mell has been in my head recently. What a place that was.', 'raw': 'Mag Mell has been in my head recently. 
What a place that was.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Those cocoons looked like I could lay down inside of one, and it would be the most powerful sleep of a lifetime, with dreams that would last one thousand years, and I'd wake up with the wisdom of generations."", 'raw': ""Those cocoons looked like I could lay down inside of one, and it would be the most powerful sleep of a lifetime, with dreams that would last one thousand years, and I'd wake up with the wisdom of generations.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '...Hey, anybody like text adventures?', 'raw': '...Hey, anybody like text adventures?'}]","Anybody ever play Final Fantasy: Crystal Chronicles? +Like, *really* play it? + +Mag Mell has been in my head recently. What a place that was. + +Those cocoons looked like I could lay down inside of one, and it would be the most powerful sleep of a lifetime, with dreams that would last one thousand years, and I'd wake up with the wisdom of generations. + +...Hey, anybody like text adventures?",[],[],[],2024-09-14 19:01:26,2024-09-14 21:05:27.206,"[{'_id': '6685d39f64da708c0f553c5d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6685d39f64da708c0f553c5d/KEw8NG84_vxEH6xhu-4fu.png', 'fullname': 'Bot', 'name': 'inflatebot', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 138, 'isFollowing': False}]",/posts/inflatebot/238083443023699,519,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,273510872632165,"[{'type': 'text', 'value': 'For all the Muslims out there who are interested in Quran and its tafsir (explanations). This humble dataset consists of 84 different books of tafsir for nearly all the ayat in the Quran:', 'raw': 'For all the Muslims out there who are interested in Quran and its tafsir (explanations). This humble dataset consists of 84 different books of tafsir for nearly all the ayat in the Quran:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'MohamedRashad/Quran-Tafseer'}, 'url': 'https://huggingface.co/datasets/MohamedRashad/Quran-Tafseer', 'raw': 'https://huggingface.co/datasets/MohamedRashad/Quran-Tafseer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I hope it helps someone to build something nice and useful with it ^_^', 'raw': 'I hope it helps someone to build something nice and useful with it ^_^'}]","For all the Muslims out there who are interested in Quran and its tafsir (explanations). 
This humble dataset consists of 84 different books of tafsir for nearly all the ayat in the Quran: +https://huggingface.co/datasets/MohamedRashad/Quran-Tafseer + +I hope it helps someone to build something nice and useful with it ^_^",[],[],"[{'reaction': '❤️', 'users': ['MohamedRashad', 'Etherll', 'damerajee', 'xi0v', 'AtefAbdo99', 'Renegadesoffun', 'tousif1988', 'AbubakkarSiddique', 'PaOumar', 'SadilKhan', 'wisam84', 'ibrahim313'], 'count': 12}, {'reaction': '👀', 'users': ['John6666', 'xi0v', 'jobinus', 'AtefAbdo99', 'djuna'], 'count': 5}]",2024-09-14 15:17:20,2024-09-15 07:25:28.474,[],/posts/MohamedRashad/273510872632165,3524,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png,273.0,Ali El Filali,alielfilali01,352438412508427,"[{'type': 'text', 'value': 'Are the servers down or what ? Am I the only one experiencing this error :', 'raw': 'Are the servers down or what ? Am I the only one experiencing this error :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ""HfHubHTTPError: 500 Server Error: Internal Server Error for url: https://huggingface.co/api/datasets/...../)\n\nInternal Error - We're working hard to fix this as soon as possible!"", 'raw': ""```\nHfHubHTTPError: 500 Server Error: Internal Server Error for url: https://huggingface.co/api/datasets/...../)\n\nInternal Error - We're working hard to fix this as soon as possible!\n```""}]","Are the servers down or what ? Am I the only one experiencing this error : +``` +HfHubHTTPError: 500 Server Error: Internal Server Error for url: https://huggingface.co/api/datasets/...../) + +Internal Error - We're working hard to fix this as soon as possible! +```",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-14 11:30:39,2024-09-14 15:02:27.110,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '626237d9bbcbd1c34f1bb231', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png', 'fullname': 'Ali El Filali', 'name': 'alielfilali01', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 273, 'isFollowing': False}]",/posts/alielfilali01/352438412508427,617,,2 +/avatars/33504742434a0c35019a227ca4cf1170.svg,10.0,Shreyas,Shreyas094,345918148011297,"[{'type': 'text', 'value': 'Help me to upgrade my model. ', 'raw': 'Help me to upgrade my model. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hi all, so I am a complete beginner in coding, however, with the help of Claude (similar to Matt :P) and GPT 4o, I have been able to develop this RAG PDF summarizer/Q&A plus a web search tool. ', 'raw': 'Hi all, so I am a complete beginner in coding, however, with the help of Claude (similar to Matt :P) and GPT 4o, I have been able to develop this RAG PDF summarizer/Q&A plus a web search tool. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The application is specifically built for summarization tasks, including summarizing a financial document, news article, resume, research document, call transcript, etc. ', 'raw': 'The application is specifically built for summarization tasks, including summarizing a financial document, news article, resume, research document, call transcript, etc. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The space could be found here: ', 'raw': 'The space could be found here: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Shreyas094/SearchGPT'}, 'url': 'https://huggingface.co/spaces/Shreyas094/SearchGPT', 'raw': 'https://huggingface.co/spaces/Shreyas094/SearchGPT'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The news tool simply uses duckduckgo chat to generate the search results using llama 3.1 70bn model. ', 'raw': 'The news tool simply uses duckduckgo chat to generate the search results using llama 3.1 70bn model. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I want your support to fine-tune the retrieval task for handling more unstructured documents. ', 'raw': 'I want your support to fine-tune the retrieval task for handling more unstructured documents. '}]","Help me to upgrade my model. + +Hi all, so I am a complete beginner in coding, however, with the help of Claude (similar to Matt :P) and GPT 4o, I have been able to develop this RAG PDF summarizer/Q&A plus a web search tool. + +The application is specifically built for summarization tasks, including summarizing a financial document, news article, resume, research document, call transcript, etc. + +The space could be found here: https://huggingface.co/spaces/Shreyas094/SearchGPT + +The news tool simply uses duckduckgo chat to generate the search results using llama 3.1 70bn model. + +I want your support to fine-tune the retrieval task for handling more unstructured documents. 
",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-10 14:40:13,2024-09-12 08:18:31.786,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '6671d9237625ac621175333a', 'avatarUrl': '/avatars/33504742434a0c35019a227ca4cf1170.svg', 'fullname': 'Shreyas', 'name': 'Shreyas094', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10, 'isFollowing': False}, {'_id': '635c2f5c3cb827d58118bb01', 'avatarUrl': '/avatars/0087f207c06a793c55ed0489ff793e70.svg', 'fullname': 'nicolo', 'name': 'nicolollo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/Shreyas094/345918148011297,696,,10 +https://cdn-avatars.huggingface.co/v1/production/uploads/1669551186189-63732ebbbd81fae2b3aaf3fb.jpeg,296.0,Knut Jägersberg,KnutJaegersberg,283814276646746,"[{'type': 'text', 'value': 'appvoid/arco', 'raw': 'appvoid/arco'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'arco consistently outperforms every sota model below 600m parameters on average', 'raw': 'arco consistently outperforms every sota model below 600m parameters on average'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'appvoid/arco'}, 'url': 'https://huggingface.co/appvoid/arco', 'raw': 'https://huggingface.co/appvoid/arco'}]","appvoid/arco + +arco consistently outperforms every sota model below 600m parameters on average + +https://huggingface.co/appvoid/arco","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63732ebbbd81fae2b3aaf3fb/xYaCXyE8HoaVca7ozpZLy.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '❤️', 'users': ['appvoid'], 'count': 1}]",2024-09-10 12:13:20,2024-09-10 12:14:08.755,[],/posts/KnutJaegersberg/283814276646746,1176,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,342637851318232,"[{'type': 'text', 'value': '> Article read: Simple guide to LLM inference and to TGI ', 'raw': '> Article read: Simple guide to LLM inference and to TGI '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I\'ve just read article ""LLM inference at scale with TGI"" by ', 'raw': 'I\'ve just read article ""LLM inference at scale with TGI"" by '}, {'type': 'mention', 'user': 'martinigoyanes', 'raw': '@martinigoyanes'}, {'type': 'text', 'value': "" . It's really good content, a must-read if you want a good low-level intro to LLM inference with TGI!"", 'raw': "" . 
It's really good content, a must-read if you want a good low-level intro to LLM inference with TGI!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My takeaways:', 'raw': 'My takeaways:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How does inference work?', 'raw': 'How does inference work?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Prefill: the input prompt is tokenized on CPU, then transferred to GPU. Then one single forward pass generates the initial token.', 'raw': '🧠 Prefill: the input prompt is tokenized on CPU, then transferred to GPU. Then one single forward pass generates the initial token.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔄 Decode: the model generates (""decodes"") tokens one by one, each time appending the new token to the current input of size N to then generate a new token again with this augmented input of length N+1. This loop ends either when a specific token called ""End-of-sequence"" is generated or when the completion reaches a pre-specified maximum length. Then the sequence is de-tokenized on CPU to yield text again.', 'raw': '🔄 Decode: the model generates (""decodes"") tokens one by one, each time appending the new token to the current input of size N to then generate a new token again with this augmented input of length N+1. This loop ends either when a specific token called ""End-of-sequence"" is generated or when the completion reaches a pre-specified maximum length. Then the sequence is de-tokenized on CPU to yield text again.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': "" ⏱️ This step's speed determines the Time Per Output Token, which directly translates to the key metric: Throughput"", 'raw': "" ⏱️ This step's speed determines the Time Per Output Token, which directly translates to the key metric: Throughput""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤔 How was the separation between the two steps decided ? Like, why does prefill include this strange generation of only one token at then end?', 'raw': '🤔 How was the separation between the two steps decided ? 
Like, why does prefill include this strange generation of only one token at then end?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ The cost of attention scales quadratically with the number of tokens, so it can really explode quickly.', 'raw': '➡️ The cost of attention scales quadratically with the number of tokens, so it can really explode quickly.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To compensate for that, a really important technique called KV caching was devised: using the fact that when generating token N+1, the Key and Value (K and V) matrices generated inside the Transformers are a simple extension from the K and V from the previous step, the model caches the K and V matrices between steps : thus the separation - the prefill part is the part that prepares this KV cache, while the decoding is the one that leverages it and expands it by one at each step.', 'raw': 'To compensate for that, a really important technique called KV caching was devised: using the fact that when generating token N+1, the Key and Value (K and V) matrices generated inside the Transformers are a simple extension from the K and V from the previous step, the model caches the K and V matrices between steps : thus the separation - the prefill part is the part that prepares this KV cache, while the decoding is the one that leverages it and expands it by one at each step.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'TGI-specific takeaways:', 'raw': 'TGI-specific takeaways:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙️ TGI has many SOTA techniques for decoding: Paged Attention, KV Caching and Flash Attention…', 'raw': '⚙️ TGI has many SOTA techniques for decoding: Paged Attention, KV Caching and Flash Attention…'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🔀 TGI's router handles generations finishing early because of an EOS token: instead of static batching, it continuously batches requests to the inference engine & filters away finished requests."", 'raw': ""🔀 TGI's router handles generations finishing early because of an EOS token: instead of static batching, it continuously batches requests to the inference engine & filters away finished requests.""}]","> Article read: Simple guide to LLM inference and to TGI + +I've just read article ""LLM inference at scale with TGI"" by @martinigoyanes . It's really good content, a must-read if you want a good low-level intro to LLM inference with TGI! + +My takeaways: + +How does inference work? +🧠 Prefill: the input prompt is tokenized on CPU, then transferred to GPU. Then one single forward pass generates the initial token. +🔄 Decode: the model generates (""decodes"") tokens one by one, each time appending the new token to the current input of size N to then generate a new token again with this augmented input of length N+1. This loop ends either when a specific token called ""End-of-sequence"" is generated or when the completion reaches a pre-specified maximum length. Then the sequence is de-tokenized on CPU to yield text again. + ⏱️ This step's speed determines the Time Per Output Token, which directly translates to the key metric: Throughput + +🤔 How was the separation between the two steps decided ? Like, why does prefill include this strange generation of only one token at then end? +➡️ The cost of attention scales quadratically with the number of tokens, so it can really explode quickly. 
+To compensate for that, a really important technique called KV caching was devised: using the fact that when generating token N+1, the Key and Value (K and V) matrices generated inside the Transformers are a simple extension from the K and V from the previous step, the model caches the K and V matrices between steps : thus the separation - the prefill part is the part that prepares this KV cache, while the decoding is the one that leverages it and expands it by one at each step. + +TGI-specific takeaways: +⚙️ TGI has many SOTA techniques for decoding: Paged Attention, KV Caching and Flash Attention… +🔀 TGI's router handles generations finishing early because of an EOS token: instead of static batching, it continuously batches requests to the inference engine & filters away finished requests.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/8_CFLfbkMRDWj8QkgTcRh.png'}]","[{'_id': '65de001d6a6643b02251fd2a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65de001d6a6643b02251fd2a/8YaiGgRzkOG6WAsY-ny-t.jpeg', 'fullname': 'Martin Iglesias Goyanes', 'name': 'martinigoyanes', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}]","[{'reaction': '👀', 'users': ['John6666', 'osanseviero'], 'count': 2}]",2024-09-10 09:01:56,2024-09-10 09:22:39.068,"[{'_id': '63d10d4e8eaa4831005e92b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg', 'fullname': 'Aymeric Roucher', 'name': 'm-ric', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1652, 'isFollowing': False}]",/posts/m-ric/342637851318232,1229,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/603945d6db430f160dced222/Rf3ChIRWR8eBi7sEVgl4s.png,39.0,Abid Ali Awan,kingabzpro,368696385907237,"[{'type': 'text', 'value': 'I never imagined that Jenkins could be as powerful and easy to implement as GitHub Actions. Loving it. 🥰', 'raw': 'I never imagined that Jenkins could be as powerful and easy to implement as GitHub Actions. Loving it. 🥰'}, {'type': 'new_line', 'raw': '\n'}]","I never imagined that Jenkins could be as powerful and easy to implement as GitHub Actions. Loving it. 
🥰 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/603945d6db430f160dced222/T_TaJszJgi6hYYw70TEYl.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/603945d6db430f160dced222/9uhR9jM12mDiqAHLz_1MU.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/603945d6db430f160dced222/ifHurdFtQqFqAuqKmNSQV.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'kingabzpro', 'kevinch3'], 'count': 3}, {'reaction': '🔥', 'users': ['sanjay7178'], 'count': 1}]",2024-09-10 08:58:46,2024-09-10 08:58:46.036,[],/posts/kingabzpro/368696385907237,1691,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,748889312583520,"[{'type': 'text', 'value': '🎓 Introducing the конспекты-уроков.рф Lesson Plans Dataset - ', 'raw': '🎓 Introducing the конспекты-уроков.рф Lesson Plans Dataset - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/classnotes'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/classnotes', 'raw': 'https://huggingface.co/datasets/nyuuzyou/classnotes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset highlights:', 'raw': 'Dataset highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Metadata for 65,068 lesson plans from конспекты-уроков.рф', 'raw': '- Metadata for 65,068 lesson plans from конспекты-уроков.рф'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 58,433 lesson plans available in original format', 'raw': '- 58,433 lesson plans available in original format'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multilingual content: Primarily Russian, with some Kazakh, Ukrainian, Belarusian, and English', 'raw': '- Multilingual content: Primarily Russian, with some Kazakh, Ukrainian, Belarusian, and English'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Each entry includes: URL, title, description, author, publication date, file size, and download link', 'raw': '- Each entry includes: URL, title, description, author, publication date, file size, and download link'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Data reflects educational materials accessible through the конспекты-уроков.рф platform', 'raw': '- Data reflects educational materials accessible through the конспекты-уроков.рф platform'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Licensed under Creative Commons (', 'raw': '- Licensed under Creative Commons ('}, {'type': 'link', 'href': 'https://creativecommons.org/licenses/by-nc/3.0/deed.en', 'raw': 'https://creativecommons.org/licenses/by-nc/3.0/deed.en'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This dataset offers a unique window into online educational resources, particularly in Russian-language contexts. It provides opportunities for analyzing lesson plan trends, topic distributions, and language patterns in educational materials. The dataset is particularly well-suited for tasks such as text classification and text retrieval in multilingual educational settings.', 'raw': 'This dataset offers a unique window into online educational resources, particularly in Russian-language contexts. 
It provides opportunities for analyzing lesson plan trends, topic distributions, and language patterns in educational materials. The dataset is particularly well-suited for tasks such as text classification and text retrieval in multilingual educational settings.'}]","🎓 Introducing the конспекты-уроков.рф Lesson Plans Dataset - https://huggingface.co/datasets/nyuuzyou/classnotes
+
+Dataset highlights:
+- Metadata for 65,068 lesson plans from конспекты-уроков.рф
+- 58,433 lesson plans available in original format
+- Multilingual content: Primarily Russian, with some Kazakh, Ukrainian, Belarusian, and English
+- Each entry includes: URL, title, description, author, publication date, file size, and download link
+- Data reflects educational materials accessible through the конспекты-уроков.рф platform
+- Licensed under Creative Commons (https://creativecommons.org/licenses/by-nc/3.0/deed.en)
+
+This dataset offers a unique window into online educational resources, particularly in Russian-language contexts. It provides opportunities for analyzing lesson plan trends, topic distributions, and language patterns in educational materials. The dataset is particularly well-suited for tasks such as text classification and text retrieval in multilingual educational settings.",[],[],"[{'reaction': '👀', 'users': ['John6666', 'louisbrulenaudet'], 'count': 2}]",2024-09-10 08:05:34,2024-09-10 10:11:31.082,"[{'_id': '6637ef9836f6e895789b5225', 'avatarUrl': '/avatars/e461a66932ae0a04d65f70a2fe250441.svg', 'fullname': 'Jessica Rose', 'name': 'jesslynnrose', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '643ac5d2e2b979ae6144d68c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png', 'fullname': 'nyuuzyou', 'name': 'nyuuzyou', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 244, 'isFollowing': False}]",/posts/nyuuzyou/748889312583520,728,,2
+https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,186931011918426,"[{'type': 'text', 'value': 'So awesome , now i can deploy a jupyterlab on huggingface and deploy gradio from the jupyterlab ', 'raw': 'So awesome , now i can deploy a jupyterlab on huggingface and deploy gradio from the jupyterlab '}]","So awesome , now i can deploy a jupyterlab on huggingface and deploy gradio from the jupyterlab ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62a3bb1cd0d8c2c2169f0b88/UPIs0hyJDtRhl-urN75wo.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62a3bb1cd0d8c2c2169f0b88/WcCr7HO7oxuLL8xxw2FX-.png'}]",[],"[{'reaction': '🔥', 'users': ['louisbrulenaudet', 'John6666', 'Blane187', 'osanseviero', 'edison1', 'jaykakadiya18'], 'count': 6}, {'reaction': '🚀', 'users': ['John6666', 'lucianosb', 'edison1', 'den0620'], 'count': 4}]",2024-09-10 06:42:39,2024-09-10 06:42:39.718,[],/posts/Tonic/186931011918426,2671,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,242979079352833,"[{'type': 'text', 'value': 'NEW RELEASE!', 'raw': 'NEW RELEASE!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- MOTH is a generalist chat model, using high quality synthetic data to improve general performance.', 
'raw': '- MOTH is a generalist chat model, using high quality synthetic data to improve general performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Currently available for Llama 3.1 and Gemma 2, more models to follow in the future.', 'raw': '- Currently available for Llama 3.1 and Gemma 2, more models to follow in the future.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'get the models:', 'raw': 'get the models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'sequelbox/Llama3.1-8B-MOTH'}, 'url': 'https://huggingface.co/sequelbox/Llama3.1-8B-MOTH', 'raw': 'https://huggingface.co/sequelbox/Llama3.1-8B-MOTH'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'sequelbox/gemma-2-9B-MOTH'}, 'url': 'https://huggingface.co/sequelbox/gemma-2-9B-MOTH', 'raw': 'https://huggingface.co/sequelbox/gemma-2-9B-MOTH'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'get the dataset:', 'raw': 'get the dataset:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'sequelbox/Supernova'}, 'url': 'https://huggingface.co/datasets/sequelbox/Supernova', 'raw': 'https://huggingface.co/datasets/sequelbox/Supernova'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '<3 for everyone to use <3', 'raw': '<3 for everyone to use <3'}]","NEW RELEASE! + +- MOTH is a generalist chat model, using high quality synthetic data to improve general performance. +- Currently available for Llama 3.1 and Gemma 2, more models to follow in the future. 
+ +get the models: +https://huggingface.co/sequelbox/Llama3.1-8B-MOTH +https://huggingface.co/sequelbox/gemma-2-9B-MOTH + +get the dataset: +https://huggingface.co/datasets/sequelbox/Supernova + +<3 for everyone to use <3",[],[],"[{'reaction': '🔥', 'users': ['takeraparterer', 'John6666', 'not-lain'], 'count': 3}]",2024-09-10 00:02:17,2024-09-10 00:06:07.639,"[{'_id': '63444f2687964b331809eb55', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png', 'fullname': 't.d.a.g.', 'name': 'sequelbox', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 89, 'isFollowing': False}]",/posts/sequelbox/242979079352833,1226,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,391291875700546,"[{'type': 'text', 'value': 'Ultimate FLUX LoRA Training Tutorial: Windows and Cloud Deployment', 'raw': 'Ultimate FLUX LoRA Training Tutorial: Windows and Cloud Deployment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have done total 104 different LoRA trainings and compared each one of them to find the very best hyper parameters and the workflow for FLUX LoRA training by using Kohya GUI training script.', 'raw': 'I have done total 104 different LoRA trainings and compared each one of them to find the very best hyper parameters and the workflow for FLUX LoRA training by using Kohya GUI training script.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can see all the done experiments’ checkpoint names and their repo links in following public post: ', 'raw': 'You can see all the done experiments’ checkpoint names and their repo links in following public post: '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/110838414', 'raw': 'https://www.patreon.com/posts/110838414'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After completing all these FLUX LoRA trainings by using the most VRAM optimal and performant optimizer Adafactor I came up with all of the following ranked ready to use configurations.', 'raw': 'After completing all these FLUX LoRA trainings by using the most VRAM optimal and performant optimizer Adafactor I came up with all of the following ranked ready to use configurations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can download all the configurations, all research data, installers and instructions at the following link : ', 'raw': 'You can download all the configurations, all research data, installers and instructions at the following link : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/110879657', 'raw': 'https://www.patreon.com/posts/110879657'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tutorials', 'raw': 'Tutorials'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I also have prepared 2 full tutorials. First tutorial covers how to train and use the best FLUX LoRA locally on your Windows computer : ', 'raw': 'I also have prepared 2 full tutorials. 
First tutorial covers how to train and use the best FLUX LoRA locally on your Windows computer : '}, {'type': 'link', 'href': 'https://youtu.be/nySGu12Y05k', 'raw': 'https://youtu.be/nySGu12Y05k'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is the main tutorial that you have to watch without skipping to learn everything. It has total 74 chapters, manually written English captions. It is a perfect resource to become 0 to hero for FLUX LoRA training.', 'raw': 'This is the main tutorial that you have to watch without skipping to learn everything. It has total 74 chapters, manually written English captions. It is a perfect resource to become 0 to hero for FLUX LoRA training.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The second tutorial I have prepared is for how to train FLUX LoRA on cloud. This tutorial is super extremely important for several reasons. If you don’t have a powerful GPU, you can rent a very powerful and very cheap GPU on Massed Compute and RunPod. I prefer Massed Compute since it is faster and cheaper with our special coupon SECourses. Another reason is that in this tutorial video, I have fully in details shown how to train on a multiple GPU setup to scale your training speed. Moreover, I have shown how to upload your checkpoints and files ultra fast to Hugging Face for saving and transferring for free. Still watch first above Windows tutorial to be able to follow below cloud tutorial : ', 'raw': 'The second tutorial I have prepared is for how to train FLUX LoRA on cloud. This tutorial is super extremely important for several reasons. If you don’t have a powerful GPU, you can rent a very powerful and very cheap GPU on Massed Compute and RunPod. I prefer Massed Compute since it is faster and cheaper with our special coupon SECourses. Another reason is that in this tutorial video, I have fully in details shown how to train on a multiple GPU setup to scale your training speed. Moreover, I have shown how to upload your checkpoints and files ultra fast to Hugging Face for saving and transferring for free. Still watch first above Windows tutorial to be able to follow below cloud tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/-uhL2nW7Ddw', 'raw': 'https://youtu.be/-uhL2nW7Ddw'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For upscaling SUPIR used : ', 'raw': 'For upscaling SUPIR used : '}, {'type': 'link', 'href': 'https://youtu.be/OYxVEvDf284', 'raw': 'https://youtu.be/OYxVEvDf284'}, {'type': 'new_line', 'raw': '\n'}]","Ultimate FLUX LoRA Training Tutorial: Windows and Cloud Deployment + +I have done total 104 different LoRA trainings and compared each one of them to find the very best hyper parameters and the workflow for FLUX LoRA training by using Kohya GUI training script. + +You can see all the done experiments’ checkpoint names and their repo links in following public post: https://www.patreon.com/posts/110838414 + +After completing all these FLUX LoRA trainings by using the most VRAM optimal and performant optimizer Adafactor I came up with all of the following ranked ready to use configurations. + +You can download all the configurations, all research data, installers and instructions at the following link : https://www.patreon.com/posts/110879657 + + +Tutorials +I also have prepared 2 full tutorials. 
First tutorial covers how to train and use the best FLUX LoRA locally on your Windows computer : https://youtu.be/nySGu12Y05k + +This is the main tutorial that you have to watch without skipping to learn everything. It has total 74 chapters, manually written English captions. It is a perfect resource to become 0 to hero for FLUX LoRA training. + +The second tutorial I have prepared is for how to train FLUX LoRA on cloud. This tutorial is super extremely important for several reasons. If you don’t have a powerful GPU, you can rent a very powerful and very cheap GPU on Massed Compute and RunPod. I prefer Massed Compute since it is faster and cheaper with our special coupon SECourses. Another reason is that in this tutorial video, I have fully in details shown how to train on a multiple GPU setup to scale your training speed. Moreover, I have shown how to upload your checkpoints and files ultra fast to Hugging Face for saving and transferring for free. Still watch first above Windows tutorial to be able to follow below cloud tutorial : https://youtu.be/-uhL2nW7Ddw + +For upscaling SUPIR used : https://youtu.be/OYxVEvDf284 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/VbYzIJOTMctOkwNn3mVM3.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/uU3ArOuzF5ZzLWjLmyKKq.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/MvLENXGEpU-cD_-rQCynV.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/6L_NpctOxUsxaLXHRw4iN.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Swc_yxrbhAPJbTReGj6m7.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/hhNHQY2DOVifJXacyn0sc.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/xfoWcolcRNJTbk9pLajyg.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/IU1llIeB34U620rcFvp_L.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/p9-e-fydLKkli3Ae9IBxe.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/f1MG0uCnw2SI14TO46aua.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/xNRyP-HO1H6GxJpha2Hy9.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/mLcxi2jxUsPNA5NdrVXpP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/10l3pTYcJ3Yce5NMUBcdP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/5MP9BolsMuwcQ7OquVHpC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/QcZJhZrcqjL6SqjGe8dS0.png'}]",[],"[{'reaction': '🚀', 'users': ['MonsterMMORPG', 'John6666', 'ai-everyday', 'cliff5968', 'matchaaaaa'], 'count': 5}, {'reaction': '🔥', 'users': ['MonsterMMORPG', 'lucianosb'], 'count': 2}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'kuohua'], 'count': 2}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'cliff5968'], 'count': 2}, {'reaction': '👀', 'users': ['MonsterMMORPG'], 'count': 1}, 
{'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2024-09-09 23:17:26,2024-09-09 23:17:26.320,[],/posts/MonsterMMORPG/391291875700546,2709,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,294238824744539,"[{'type': 'text', 'value': 'I got my account back!', 'raw': 'I got my account back!'}]",I got my account back!,[],[],"[{'reaction': '👍', 'users': ['John6666', 'Delik', 'LeroyDyer', 'AtAndDev'], 'count': 4}, {'reaction': '🔥', 'users': ['AtAndDev'], 'count': 1}]",2024-09-09 21:35:02,2024-09-10 09:47:47.882,"[{'_id': '65d883893a52cd9bcd8ab7cf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg', 'fullname': 'leroy Samuel Dyer', 'name': 'LeroyDyer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}]",/posts/nroggendorff/294238824744539,1360,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,789970389359069,"[{'type': 'text', 'value': 'We can’t think in more than three dimensions. ', 'raw': 'We can’t think in more than three dimensions. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But we have no problem doing math and writing computer programs in many dimensions. It just works.', 'raw': 'But we have no problem doing math and writing computer programs in many dimensions. It just works.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I find that extremely crazy.', 'raw': 'I find that extremely crazy.'}]","We can’t think in more than three dimensions. + +But we have no problem doing math and writing computer programs in many dimensions. It just works. 
+ +I find that extremely crazy.",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-09 19:29:28,2024-09-10 10:23:45.798,"[{'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}, {'_id': '629a173153a72d997d3f57d0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg', 'fullname': 'Santiago Viquez', 'name': 'santiviquez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 86, 'isFollowing': False}, {'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}]",/posts/santiviquez/789970389359069,444,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,436311113936516,"[{'type': 'text', 'value': '🙋🏻\u200d♂️Hey there folks,', 'raw': '🙋🏻\u200d♂️Hey there folks,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Did you see the new coding model from ', 'raw': 'Did you see the new coding model from '}, {'type': 'mention', 'user': '01-ai', 'raw': '@01-ai'}, {'type': 'text', 'value': ' ? ', 'raw': ' ? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'collection : ', 'raw': 'collection : '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': '01-ai/yi-coder-66bdb00f5bdd611f9a008f30'}, 'url': 'https://huggingface.co/collections/01-ai/yi-coder-66bdb00f5bdd611f9a008f30', 'raw': 'https://huggingface.co/collections/01-ai/yi-coder-66bdb00f5bdd611f9a008f30'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'demo : ', 'raw': 'demo : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Tonic/Yi-Coder-9B'}, 'url': 'https://huggingface.co/spaces/Tonic/Yi-Coder-9B', 'raw': 'https://huggingface.co/spaces/Tonic/Yi-Coder-9B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'achieves SOTA on benchmarks , 125K context window , 55 languages including Docker, Js and many more 🚀', 'raw': 'achieves SOTA on benchmarks , 125K context window , 55 languages including Docker, Js and many more 🚀'}, {'type': 'new_line', 'raw': '\n'}]","🙋🏻‍♂️Hey there folks, + +Did you see the new coding model from @01-ai ? 
+ +collection : https://huggingface.co/collections/01-ai/yi-coder-66bdb00f5bdd611f9a008f30 +demo : https://huggingface.co/spaces/Tonic/Yi-Coder-9B + +achieves SOTA on benchmarks , 125K context window , 55 languages including Docker, Js and many more 🚀 +",[],[],"[{'reaction': '🚀', 'users': ['web3builder', 'John6666', 'louisbrulenaudet', 'djuna', 'KingNish'], 'count': 5}]",2024-09-05 09:56:55,2024-09-05 13:32:54.611,"[{'_id': '64a07f6a37bfb5202b083334', 'avatarUrl': '/avatars/1280748c5a2e24a8f00618b544c9749a.svg', 'fullname': 'leuneli', 'name': 'leuneli', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Tonic/436311113936516,1096,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,995511131459162,"[{'type': 'text', 'value': ""If you have documents that do not only have text and you're doing retrieval or RAG (using OCR and LLMs), give it up and give ColPali and vision language models a try 🤗"", 'raw': ""If you have documents that do not only have text and you're doing retrieval or RAG (using OCR and LLMs), give it up and give ColPali and vision language models a try 🤗""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Why? Documents consist of multiple modalities: layout, table, text, chart, images. Document processing pipelines often consist of multiple models and they're immensely brittle and slow. 🥲"", 'raw': ""Why? Documents consist of multiple modalities: layout, table, text, chart, images. Document processing pipelines often consist of multiple models and they're immensely brittle and slow. 🥲""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How? ColPali is a ColBERT-like document retrieval model built on PaliGemma, it operates over image patches directly, and indexing takes far less time with more accuracy. You can use it for retrieval, and if you want to do retrieval augmented generation, find the closest document, and do not process it, give it directly to a VLM like Qwen2-VL (as image input) and give your text query. 🤝', 'raw': 'How? ColPali is a ColBERT-like document retrieval model built on PaliGemma, it operates over image patches directly, and indexing takes far less time with more accuracy. You can use it for retrieval, and if you want to do retrieval augmented generation, find the closest document, and do not process it, give it directly to a VLM like Qwen2-VL (as image input) and give your text query. 🤝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is much faster + you do not lose out on any information + much easier to maintain too! 🥳', 'raw': 'This is much faster + you do not lose out on any information + much easier to maintain too! 
🥳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Multimodal RAG ', 'raw': 'Multimodal RAG '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/multimodal-rag-66d97602e781122aae0a5139'}, 'url': 'https://huggingface.co/collections/merve/multimodal-rag-66d97602e781122aae0a5139', 'raw': 'https://huggingface.co/collections/merve/multimodal-rag-66d97602e781122aae0a5139'}, {'type': 'text', 'value': ' 💬', 'raw': ' 💬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Document AI (made it way before, for folks who want structured input/output and can fine-tune a model) ', 'raw': 'Document AI (made it way before, for folks who want structured input/output and can fine-tune a model) '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/awesome-document-ai-65ef1cdc2e97ef9cc85c898e'}, 'url': 'https://huggingface.co/collections/merve/awesome-document-ai-65ef1cdc2e97ef9cc85c898e', 'raw': 'https://huggingface.co/collections/merve/awesome-document-ai-65ef1cdc2e97ef9cc85c898e'}, {'type': 'text', 'value': ' 📖', 'raw': ' 📖'}]","If you have documents that do not only have text and you're doing retrieval or RAG (using OCR and LLMs), give it up and give ColPali and vision language models a try 🤗 + +Why? Documents consist of multiple modalities: layout, table, text, chart, images. Document processing pipelines often consist of multiple models and they're immensely brittle and slow. 🥲 + +How? ColPali is a ColBERT-like document retrieval model built on PaliGemma, it operates over image patches directly, and indexing takes far less time with more accuracy. You can use it for retrieval, and if you want to do retrieval augmented generation, find the closest document, and do not process it, give it directly to a VLM like Qwen2-VL (as image input) and give your text query. 🤝 + +This is much faster + you do not lose out on any information + much easier to maintain too! 
🥳 + +Multimodal RAG https://huggingface.co/collections/merve/multimodal-rag-66d97602e781122aae0a5139 💬 +Document AI (made it way before, for folks who want structured input/output and can fine-tune a model) https://huggingface.co/collections/merve/awesome-document-ai-65ef1cdc2e97ef9cc85c898e 📖","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/gRQUP4l8E5DzT2N-PeNYx.jpeg'}]",[],"[{'reaction': '👍', 'users': ['adorkin', 'web3builder', 'John6666', 'Percifal', 'jrmasiero', 'rwightman', 'seek007', 'abishekcodes', 'zliu', 'AI4Industry', 'louisbrulenaudet', 'byteprobe', 'muhtasham', 'rumbleFTW'], 'count': 14}, {'reaction': '🔥', 'users': ['umair894', 'abishekcodes', 'fsommers', 'jithinjames', 'rumbleFTW'], 'count': 5}, {'reaction': '❤️', 'users': ['Csplk', 'rumbleFTW', 'madstuntman11'], 'count': 3}]",2024-09-05 09:17:38,2024-09-21 20:09:39.856,"[{'_id': '6444b3135af87c73bbbd7447', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6444b3135af87c73bbbd7447/-WLquJY3E1KZSJbnYUkwD.jpeg', 'fullname': 'Frank Sommers', 'name': 'fsommers', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9, 'isFollowing': False}]",/posts/merve/995511131459162,3902,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,866472607836541,"[{'type': 'text', 'value': ""🔥 Dataset Viber 0.3 launches with Synthesizer to synthesise data with a human in the loop, for free, using open source models with Argilla's distilabel but within a quick-and-easy Gradio Interface."", 'raw': ""🔥 Dataset Viber 0.3 launches with Synthesizer to synthesise data with a human in the loop, for free, using open source models with Argilla's distilabel but within a quick-and-easy Gradio Interface.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Why? Not trying to be all fancy and formal just to iterate on your data and to get familiar with your prompts and the produced data. Under the hood, it relies on Hugging Face Inference endpoints and the latest LLMs and VLMs like Meta Llama 3.1 and BlackForest Labs Flux models.', 'raw': 'Why? Not trying to be all fancy and formal just to iterate on your data and to get familiar with your prompts and the produced data. 
Under the hood, it relies on Hugging Face Inference endpoints and the latest LLMs and VLMs like Meta Llama 3.1 and BlackForest Labs Flux models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'An addition to the other Interfaces that are already support.', 'raw': 'An addition to the other Interfaces that are already support.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- CollectorInterface: Lazily collect data of model interactions without human annotation.', 'raw': '- CollectorInterface: Lazily collect data of model interactions without human annotation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- AnnotatorInterface: Walk through your data and annotate it with models in the loop.', 'raw': '- AnnotatorInterface: Walk through your data and annotate it with models in the loop.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Synthesizer: Synthesize data with distilabel in the loop.', 'raw': '- Synthesizer: Synthesize data with distilabel in the loop.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- BulkInterface: Explore your data distribution and annotate in bulk.', 'raw': '- BulkInterface: Explore your data distribution and annotate in bulk.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⭐️ Give some good vibes: ', 'raw': '⭐️ Give some good vibes: '}, {'type': 'link', 'href': 'https://github.com/davidberenstein1957/dataset-viber', 'raw': 'https://github.com/davidberenstein1957/dataset-viber'}]","🔥 Dataset Viber 0.3 launches with Synthesizer to synthesise data with a human in the loop, for free, using open source models with Argilla's distilabel but within a quick-and-easy Gradio Interface. + +Why? Not trying to be all fancy and formal just to iterate on your data and to get familiar with your prompts and the produced data. Under the hood, it relies on Hugging Face Inference endpoints and the latest LLMs and VLMs like Meta Llama 3.1 and BlackForest Labs Flux models. + +An addition to the other Interfaces that are already support. +- CollectorInterface: Lazily collect data of model interactions without human annotation. +- AnnotatorInterface: Walk through your data and annotate it with models in the loop. +- Synthesizer: Synthesize data with distilabel in the loop. +- BulkInterface: Explore your data distribution and annotate in bulk. + +⭐️ Give some good vibes: https://github.com/davidberenstein1957/dataset-viber","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/634ff41ff32062e9eb7b06a3/hXo1fjJ_P7vCKo2brM5HW.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-09-05 08:48:03,2024-09-05 08:48:03.787,[],/posts/davidberenstein1957/866472607836541,293,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png,273.0,Ali El Filali,alielfilali01,579064956863993,"[{'type': 'text', 'value': 'Datapluck: Portability Tool for Huggingface Datasets', 'raw': 'Datapluck: Portability Tool for Huggingface Datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '""I found myself recently whipping up notebooks just to pull huggingface datasets locally, annotate or operate changes and update them again. 
This happened often enough that I made a cli tool out of it, which I\'ve been using successfully for the last few months.', 'raw': '""I found myself recently whipping up notebooks just to pull huggingface datasets locally, annotate or operate changes and update them again. This happened often enough that I made a cli tool out of it, which I\'ve been using successfully for the last few months.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'While huggingface uses open formats, I found the official toolchain relatively low-level and not adapted to quick operations such as what I am doing.""', 'raw': 'While huggingface uses open formats, I found the official toolchain relatively low-level and not adapted to quick operations such as what I am doing.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '~ ', 'raw': '~ '}, {'type': 'mention', 'user': 'omarkamali', 'raw': '@omarkamali'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link : ', 'raw': 'Link : '}, {'type': 'link', 'href': 'https://omarkama.li/blog/datapluck', 'raw': 'https://omarkama.li/blog/datapluck'}]","Datapluck: Portability Tool for Huggingface Datasets + +""I found myself recently whipping up notebooks just to pull huggingface datasets locally, annotate or operate changes and update them again. This happened often enough that I made a cli tool out of it, which I've been using successfully for the last few months. + +While huggingface uses open formats, I found the official toolchain relatively low-level and not adapted to quick operations such as what I am doing."" +~ @omarkamali + +Link : https://omarkama.li/blog/datapluck",[],"[{'_id': '665cc58d164b78e36b655f25', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/665cc58d164b78e36b655f25/yiyOVgR3YKe_qNa5xEmu-.jpeg', 'fullname': 'Omar Kamali', 'name': 'omarkamali', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10}]","[{'reaction': '❤️', 'users': ['omarkamali', 'abdeljalilELmajjodi', 'louisbrulenaudet'], 'count': 3}, {'reaction': '👀', 'users': ['John6666', 'async0x42'], 'count': 2}]",2024-09-05 04:13:36,2024-09-05 12:17:30.997,"[{'_id': '665cc58d164b78e36b655f25', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/665cc58d164b78e36b655f25/yiyOVgR3YKe_qNa5xEmu-.jpeg', 'fullname': 'Omar Kamali', 'name': 'omarkamali', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10, 'isFollowing': False}]",/posts/alielfilali01/579064956863993,1106,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,290847981802358,"[{'type': 'text', 'value': 'Just wrapped up a deep dive into the latest lecture on building LLMs, such as ChatGPT, from ', 'raw': 'Just wrapped up a deep dive into the latest lecture on building LLMs, such as ChatGPT, from '}, {'type': 'mention', 'user': 'Stanford', 'raw': '@Stanford'}, {'type': 'text', 'value': ' CS229 course. Here are my top takeaways:', 'raw': ' CS229 course. 
Here are my top takeaways:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Understanding the Components: LLMs like ChatGPT, Claude, and others are more than just neural networks; they are a complex blend of architecture, training loss, data evaluation, and systems. Knowing how these components work together is key to improving and scaling these models.', 'raw': '🔍 Understanding the Components: LLMs like ChatGPT, Claude, and others are more than just neural networks; they are a complex blend of architecture, training loss, data evaluation, and systems. Knowing how these components work together is key to improving and scaling these models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Scaling Matters: Performance improves predictably with more data, bigger models, and greater computational power. However, balancing these factors is crucial to avoid overfitting and resource waste.', 'raw': '📊 Scaling Matters: Performance improves predictably with more data, bigger models, and greater computational power. However, balancing these factors is crucial to avoid overfitting and resource waste.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📈 Data is King: LLMs are trained on trillions of tokens scraped from the internet, but the quality of this data matters immensely. Rigorous filtering and deduplication processes are essential to maintaining data integrity.', 'raw': '📈 Data is King: LLMs are trained on trillions of tokens scraped from the internet, but the quality of this data matters immensely. Rigorous filtering and deduplication processes are essential to maintaining data integrity.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏗️ Pre-Training vs. Post-Training: While pre-training equips the model with general knowledge, post-training (like RLHF) fine-tunes it to follow human-like responses, reducing toxic outputs and improving alignment with human values.', 'raw': '🏗️ Pre-Training vs. Post-Training: While pre-training equips the model with general knowledge, post-training (like RLHF) fine-tunes it to follow human-like responses, reducing toxic outputs and improving alignment with human values.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Reinforcement Learning from Human Feedback (RLHF): This technique allows LLMs to maximize outputs that align with human preferences, making models more reliable and accurate.', 'raw': '🌐 Reinforcement Learning from Human Feedback (RLHF): This technique allows LLMs to maximize outputs that align with human preferences, making models more reliable and accurate.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 Why It Matters: Understanding these processes not only helps us appreciate the complexity behind our everyday AI tools but also highlights the challenges and opportunities in the ever-evolving field of AI.', 'raw': '💡 Why It Matters: Understanding these processes not only helps us appreciate the complexity behind our everyday AI tools but also highlights the challenges and opportunities in the ever-evolving field of AI.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Whether you’re in tech, data science, or just AI-curious, staying updated on these advancements is crucial. 
LLMs are not just transforming industries; they’re redefining the future of human-computer interaction!', 'raw': 'Whether you’re in tech, data science, or just AI-curious, staying updated on these advancements is crucial. LLMs are not just transforming industries; they’re redefining the future of human-computer interaction!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I just realized this was almost 2 hours long...', 'raw': 'I just realized this was almost 2 hours long...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link: ', 'raw': 'Link: '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=9vM4p9NN0Ts', 'raw': 'https://www.youtube.com/watch?v=9vM4p9NN0Ts'}]","Just wrapped up a deep dive into the latest lecture on building LLMs, such as ChatGPT, from @Stanford CS229 course. Here are my top takeaways: + +🔍 Understanding the Components: LLMs like ChatGPT, Claude, and others are more than just neural networks; they are a complex blend of architecture, training loss, data evaluation, and systems. Knowing how these components work together is key to improving and scaling these models. + +📊 Scaling Matters: Performance improves predictably with more data, bigger models, and greater computational power. However, balancing these factors is crucial to avoid overfitting and resource waste. + +📈 Data is King: LLMs are trained on trillions of tokens scraped from the internet, but the quality of this data matters immensely. Rigorous filtering and deduplication processes are essential to maintaining data integrity. + +🏗️ Pre-Training vs. Post-Training: While pre-training equips the model with general knowledge, post-training (like RLHF) fine-tunes it to follow human-like responses, reducing toxic outputs and improving alignment with human values. + +🌐 Reinforcement Learning from Human Feedback (RLHF): This technique allows LLMs to maximize outputs that align with human preferences, making models more reliable and accurate. + +💡 Why It Matters: Understanding these processes not only helps us appreciate the complexity behind our everyday AI tools but also highlights the challenges and opportunities in the ever-evolving field of AI. + +Whether you’re in tech, data science, or just AI-curious, staying updated on these advancements is crucial. LLMs are not just transforming industries; they’re redefining the future of human-computer interaction! + +I just realized this was almost 2 hours long... 
+ +Link: https://www.youtube.com/watch?v=9vM4p9NN0Ts","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/rLGOeupSDU6QEGWMEkQuB.png'}]",[],"[{'reaction': '❤️', 'users': ['dongnt', 'alielfilali01', 'Joseph717171', 'dsmonk', 'louisbrulenaudet'], 'count': 5}, {'reaction': '👀', 'users': ['John6666', 'Joseph717171'], 'count': 2}, {'reaction': '👍', 'users': ['lamhieu'], 'count': 1}]",2024-09-04 21:37:25,2024-09-06 10:00:29.344,"[{'_id': '6569216f9c96f1a47bf45788', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6569216f9c96f1a47bf45788/mCLqmAs4dOjKdxNQVAp1w.png', 'fullname': 'Sica Rius', 'name': 'SicariusSicariiStuff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 388, 'isFollowing': False}, {'_id': '63c1dfd4a0ffa3857eb362a9', 'avatarUrl': '/avatars/ea4398745974d781ae9dc0e95b12cabe.svg', 'fullname': 'Joseph', 'name': 'Joseph717171', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 29, 'isFollowing': False}, {'_id': '65a50d71c4034f4ed7b55364', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65a50d71c4034f4ed7b55364/JCv9wLsnjT24SsL27GeFo.png', 'fullname': 'Logical Argument', 'name': 'WbjuSrceu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/singhsidhukuldeep/290847981802358,1637,,3 +/avatars/bbaffa3a6cfe0fc224d02d4dc8454886.svg,1.0,Cao Trong Thang,fptisthebest,993063646272657,"[{'type': 'text', 'value': ""I just bought HF Pro but i don't know how many request per month i can get, if i request 1 time every 5s, around 2k token, is the pro account enough?, thanks for reading"", 'raw': ""I just bought HF Pro but i don't know how many request per month i can get, if i request 1 time every 5s, around 2k token, is the pro account enough?, thanks for reading""}]","I just bought HF Pro but i don't know how many request per month i can get, if i request 1 time every 5s, around 2k token, is the pro account enough?, thanks for reading",[],[],"[{'reaction': '👀', 'users': ['John6666', 'fptisthebest', 'davidberenstein1957', 'Tonic'], 'count': 4}]",2024-09-04 21:19:51,2024-09-05 02:40:17.234,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/fptisthebest/993063646272657,853,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6455cc8d679315e4ef16fbec/M6Cfifn05BUzkCFd2QDIT.png,159.0,Tim Dolan,macadeliccc,254051507992365,"[{'type': 'text', 'value': 'My tool calling playgrounds repo has been updated again to include the use of flux1-schnell or dev image generation. This functionality is similar to using Dall-E 3 via the ', 'raw': 'My tool calling playgrounds repo has been updated again to include the use of flux1-schnell or dev image generation. This functionality is similar to using Dall-E 3 via the '}, {'type': 'inline_code', 'code': '@', 'raw': '`@`'}, {'type': 'text', 'value': ' decorator in ChatGPT. Once the function is selected, the model will either extract or improve your prompt (depending on how you ask).', 'raw': ' decorator in ChatGPT. 
Once the function is selected, the model will either extract or improve your prompt (depending on how you ask).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have also included 2 notebooks that cover different ways to access Flux for your specific use case. The first method covers how to access flux via LitServe from Lightning AI. LitServe is a bare-bones inference engine with a focus on modularity rather than raw performance. LitServe supports text generation models as well as image generation, which is great for some use cases, but does not provide the caching mechanisms from a dedicated image generation solution. ', 'raw': 'I have also included 2 notebooks that cover different ways to access Flux for your specific use case. The first method covers how to access flux via LitServe from Lightning AI. LitServe is a bare-bones inference engine with a focus on modularity rather than raw performance. LitServe supports text generation models as well as image generation, which is great for some use cases, but does not provide the caching mechanisms from a dedicated image generation solution. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Since dedicated caching mechanisms are so crucial to performance, I also included an example for how to integrate SwarmUI/ComfyUI to utilize a more dedicated infrastructure that may already be running as part of your tech stack. Resulting in a Llama-3.1 capable of utilizing specific ComfyUI JSON configs, and many different settings. ', 'raw': 'Since dedicated caching mechanisms are so crucial to performance, I also included an example for how to integrate SwarmUI/ComfyUI to utilize a more dedicated infrastructure that may already be running as part of your tech stack. Resulting in a Llama-3.1 capable of utilizing specific ComfyUI JSON configs, and many different settings. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lastly, I tested the response times for each over a small batch request to simulate a speed test.', 'raw': 'Lastly, I tested the response times for each over a small batch request to simulate a speed test.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It becomes clear quickly how efficient caching mechanisms can greatly reduce the generation time, even in a scenario where another model is called. An average 4.5 second response time is not bad at all when you consider that an 8B model is calling a 12B parameter model for a secondary generation.', 'raw': 'It becomes clear quickly how efficient caching mechanisms can greatly reduce the generation time, even in a scenario where another model is called. 
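As a concrete reference point, the LitServe side of this setup can be sketched in a few lines. This is a minimal, hypothetical server (not the repo's exact code): it assumes the diffusers FluxPipeline and LitServe's LitAPI interface, with the 4-step, guidance-free settings that FLUX.1-schnell's model card recommends.

```python
import litserve as ls

class FluxAPI(ls.LitAPI):
    def setup(self, device):
        # Load once per worker; bfloat16 keeps the 12B model within GPU memory
        import torch
        from diffusers import FluxPipeline
        self.pipe = FluxPipeline.from_pretrained(
            "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
        ).to(device)

    def decode_request(self, request):
        return request["prompt"]

    def predict(self, prompt):
        # schnell is distilled for ~4 steps with no classifier-free guidance
        return self.pipe(prompt, num_inference_steps=4, guidance_scale=0.0).images[0]

    def encode_response(self, image):
        import base64, io
        buf = io.BytesIO()
        image.save(buf, format="PNG")
        return {"image_base64": base64.b64encode(buf.getvalue()).decode()}

if __name__ == "__main__":
    ls.LitServer(FluxAPI(), accelerator="auto").run(port=8000)
```

The tool-calling model then only needs to POST the extracted or improved prompt to the server's /predict endpoint.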
An average 4.5 second response time is not bad at all when you consider that an 8B model is calling a 12B parameter model for a secondary generation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Repo: ', 'raw': 'Repo: '}, {'type': 'link', 'href': 'https://github.com/tdolan21/tool-calling-playground', 'raw': 'https://github.com/tdolan21/tool-calling-playground'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LitServe: ', 'raw': 'LitServe: '}, {'type': 'link', 'href': 'https://github.com/Lightning-AI/LitServe', 'raw': 'https://github.com/Lightning-AI/LitServe'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SwarmUI: ', 'raw': 'SwarmUI: '}, {'type': 'link', 'href': 'https://github.com/mcmonkeyprojects/SwarmUI', 'raw': 'https://github.com/mcmonkeyprojects/SwarmUI'}]","My tool calling playgrounds repo has been updated again to include the use of flux1-schnell or dev image generation. This functionality is similar to using Dall-E 3 via the `@` decorator in ChatGPT. Once the function is selected, the model will either extract or improve your prompt (depending on how you ask). + +I have also included 2 notebooks that cover different ways to access Flux for your specific use case. The first method covers how to access flux via LitServe from Lightning AI. LitServe is a bare-bones inference engine with a focus on modularity rather than raw performance. LitServe supports text generation models as well as image generation, which is great for some use cases, but does not provide the caching mechanisms from a dedicated image generation solution. + +Since dedicated caching mechanisms are so crucial to performance, I also included an example for how to integrate SwarmUI/ComfyUI to utilize a more dedicated infrastructure that may already be running as part of your tech stack. Resulting in a Llama-3.1 capable of utilizing specific ComfyUI JSON configs, and many different settings. + +Lastly, I tested the response times for each over a small batch request to simulate a speed test. + +It becomes clear quickly how efficient caching mechanisms can greatly reduce the generation time, even in a scenario where another model is called. An average 4.5 second response time is not bad at all when you consider that an 8B model is calling a 12B parameter model for a secondary generation. + +Repo: https://github.com/tdolan21/tool-calling-playground +LitServe: https://github.com/Lightning-AI/LitServe +SwarmUI: https://github.com/mcmonkeyprojects/SwarmUI","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6455cc8d679315e4ef16fbec/Fhl8PQ2daHSCs9bQkvRTo.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6455cc8d679315e4ef16fbec/Fo3QQLzYVJMT-eqKxxUAX.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '🔥', 'users': ['louisbrulenaudet'], 'count': 1}]",2024-09-04 17:01:20,2024-09-04 17:01:20.418,[],/posts/macadeliccc/254051507992365,1145,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,267778050099092,"[{'type': 'text', 'value': '🥳 𝗧𝗿𝗮𝗻𝘀𝗳𝗼𝗿𝗺𝗲𝗿𝘀 𝗔𝗴𝗲𝗻𝘁𝘀 𝗻𝗼𝘄 𝘀𝘂𝗽𝗽𝗼𝗿𝘁𝘀 𝗠𝘂𝗹𝘁𝗶-𝗮𝗴𝗲𝗻𝘁 𝘀𝘆𝘀𝘁𝗲𝗺𝘀!', 'raw': '🥳 𝗧𝗿𝗮𝗻𝘀𝗳𝗼𝗿𝗺𝗲𝗿𝘀 𝗔𝗴𝗲𝗻𝘁𝘀 𝗻𝗼𝘄 𝘀𝘂𝗽𝗽𝗼𝗿𝘁𝘀 𝗠𝘂𝗹𝘁𝗶-𝗮𝗴𝗲𝗻𝘁 𝘀𝘆𝘀𝘁𝗲𝗺𝘀!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Multi-agent systems have been introduced in Microsoft's framework Autogen. 
It simply means having several agents working together to solve your task instead of only one: this paradigm empirically yields better performance on most benchmarks. The reason for this better performance is conceptually simple: for many tasks, rather than using a do-it-all system, you would prefer to specialize units on sub-tasks. Here, having agents with separate tool sets and memories allows them to achieve efficient specialization."", 'raw': ""Multi-agent systems have been introduced in Microsoft's framework Autogen. It simply means having several agents working together to solve your task instead of only one: this paradigm empirically yields better performance on most benchmarks. The reason for this better performance is conceptually simple: for many tasks, rather than using a do-it-all system, you would prefer to specialize units on sub-tasks. Here, having agents with separate tool sets and memories allows them to achieve efficient specialization.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can now easily build hierarchical multi-agent systems with transformers.agents (not released yet, use the dev version)', 'raw': 'You can now easily build hierarchical multi-agent systems with transformers.agents (not released yet, use the dev version)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""To do so, encapsulate the agent in a ManagedAgent object. This object needs arguments agent, name, and a description, which will then be embedded in the manager agent's system prompt to let it know how to call this managed agent, as we also do for tools."", 'raw': ""To do so, encapsulate the agent in a ManagedAgent object. This object needs arguments agent, name, and a description, which will then be embedded in the manager agent's system prompt to let it know how to call this managed agent, as we also do for tools.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""See the example in the image! We'll keep building on this paradigm in the upcoming weeks 🚀"", 'raw': ""See the example in the image! We'll keep building on this paradigm in the upcoming weeks 🚀""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read more in the doc 👉 ', 'raw': 'Read more in the doc 👉 '}, {'type': 'link', 'href': 'https://github.com/huggingface/transformers/blob/main/docs/source/en/agents_advanced.md', 'raw': 'https://github.com/huggingface/transformers/blob/main/docs/source/en/agents_advanced.md'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out an advanced multi-agent system that tops the GAIA leaderboard 👉 ', 'raw': 'Check out an advanced multi-agent system that tops the GAIA leaderboard 👉 '}, {'type': 'link', 'href': 'https://github.com/aymeric-roucher/GAIA/blob/main/gaia_multiagent.py', 'raw': 'https://github.com/aymeric-roucher/GAIA/blob/main/gaia_multiagent.py'}]","🥳 𝗧𝗿𝗮𝗻𝘀𝗳𝗼𝗿𝗺𝗲𝗿𝘀 𝗔𝗴𝗲𝗻𝘁𝘀 𝗻𝗼𝘄 𝘀𝘂𝗽𝗽𝗼𝗿𝘁𝘀 𝗠𝘂𝗹𝘁𝗶-𝗮𝗴𝗲𝗻𝘁 𝘀𝘆𝘀𝘁𝗲𝗺𝘀! + +Multi-agent systems have been introduced in Microsoft's framework Autogen. It simply means having several agents working together to solve your task instead of only one: this paradigm empirically yields better performance on most benchmarks. The reason for this better performance is conceptually simple: for many tasks, rather than using a do-it-all system, you would prefer to specialize units on sub-tasks. 
Here, having agents with separate tool sets and memories allows them to achieve efficient specialization. + +You can now easily build hierarchical multi-agent systems with transformers.agents (not released yet, use the dev version) + +To do so, encapsulate the agent in a ManagedAgent object. This object needs arguments agent, name, and a description, which will then be embedded in the manager agent's system prompt to let it know how to call this managed agent, as we also do for tools. + +See the example in the image! We'll keep building on this paradigm in the upcoming weeks 🚀 + +Read more in the doc 👉 https://github.com/huggingface/transformers/blob/main/docs/source/en/agents_advanced.md + +Check out an advanced multi-agent system that tops the GAIA leaderboard 👉 https://github.com/aymeric-roucher/GAIA/blob/main/gaia_multiagent.py","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/zE2JkQiVNMx9HS_vs1NWd.png'}]",[],"[{'reaction': '👍', 'users': ['ibrahim313', 'John6666', 'osanseviero', 'Kaoeiri', 'dsmonk', 'Csplk', 'KingNish', 'whitebill', 'Winnougan'], 'count': 9}, {'reaction': '🤗', 'users': ['louisbrulenaudet', 'Kaoeiri', 'KingNish'], 'count': 3}]",2024-09-04 16:49:06,2024-09-04 16:49:06.292,[],/posts/m-ric/267778050099092,2143,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,169227177418296,"[{'type': 'text', 'value': 'the new version of Enigma, our code-instruct specialist, is out now:', 'raw': 'the new version of Enigma, our code-instruct specialist, is out now:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ValiantLabs/Llama3.1-8B-Enigma'}, 'url': 'https://huggingface.co/ValiantLabs/Llama3.1-8B-Enigma', 'raw': 'https://huggingface.co/ValiantLabs/Llama3.1-8B-Enigma'}, {'type': 'text', 'value': ' is trained on code-instruct and general chat data.', 'raw': ' is trained on code-instruct and general chat data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- the updated code-instruct database is available now as well: ', 'raw': '- the updated code-instruct database is available now as well: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'sequelbox/Tachibana'}, 'url': 'https://huggingface.co/datasets/sequelbox/Tachibana', 'raw': 'https://huggingface.co/datasets/sequelbox/Tachibana'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'more to come soon!', 'raw': 'more to come soon!'}]","the new version of Enigma, our code-instruct specialist, is out now: +- https://huggingface.co/ValiantLabs/Llama3.1-8B-Enigma is trained on code-instruct and general chat data. +- the updated code-instruct database is available now as well: https://huggingface.co/datasets/sequelbox/Tachibana + +more to come soon!",[],[],"[{'reaction': '👀', 'users': ['John6666', 'djuna'], 'count': 2}]",2024-09-04 16:23:27,2024-09-04 16:23:27.875,[],/posts/sequelbox/169227177418296,713,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/642827944fe87caede802784/a7s3Ub9Cy6-PuuaX8wwXm.png,83.0,VILARIN,vilarin,317300660282714,"[{'type': 'text', 'value': '🐣Ai2 Releasing OLMoE! 
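For orientation, the pattern reads roughly like the sketch below. Treat it as a hypothetical minimal example against the dev API of the time (the engine class, the tool import path, and the managed_agents argument may shift between versions; the agents_advanced.md page linked above has the canonical version):

```python
from transformers.agents import HfApiEngine, ManagedAgent, ReactCodeAgent
from transformers.agents.search import DuckDuckGoSearchTool

llm_engine = HfApiEngine()  # hosted LLM backend shared by both agents

# A specialized sub-agent with its own tool set and memory
web_agent = ReactCodeAgent(tools=[DuckDuckGoSearchTool()], llm_engine=llm_engine)

# Wrap it so the manager knows how and when to call it
managed_web_agent = ManagedAgent(
    agent=web_agent,
    name="web_search",
    description="Runs web searches for you. Give it your query as an argument.",
)

# The manager sees the sub-agent in its system prompt, just like a tool
manager_agent = ReactCodeAgent(
    tools=[], llm_engine=llm_engine, managed_agents=[managed_web_agent]
)
manager_agent.run("Who is the CEO of Hugging Face?")
```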
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'OLMoE-1B-7B-Instruct is a Mixture-of-Experts LLM with 1B active and 7B total parameters, and OLMoE is 100% open source: model, codebase, and datasets!', 'raw': 'OLMoE-1B-7B-Instruct is a Mixture-of-Experts LLM with 1B active and 7B total parameters, and OLMoE is 100% open source: model, codebase, and datasets!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦖Paper: ', 'raw': '🦖Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2409.02060', 'raw': 'https://arxiv.org/abs/2409.02060'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗Model: ', 'raw': '🤗Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'allenai/OLMoE-1B-7B-0924-Instruct'}, 'url': 'https://huggingface.co/allenai/OLMoE-1B-7B-0924-Instruct', 'raw': 'https://huggingface.co/allenai/OLMoE-1B-7B-0924-Instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💾Datasets: ', 'raw': '💾Datasets: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'allenai/OLMoE-mix-0924'}, 'url': 'https://huggingface.co/datasets/allenai/OLMoE-mix-0924', 'raw': 'https://huggingface.co/datasets/allenai/OLMoE-mix-0924'}]","🐣Ai2 Releasing OLMoE! +OLMoE-1B-7B-Instruct is a Mixture-of-Experts LLM with 1B active and 7B total parameters, and OLMoE is 100% open source: model, codebase, and datasets! + +🦖Paper: https://arxiv.org/abs/2409.02060 + +🤗Model: https://huggingface.co/allenai/OLMoE-1B-7B-0924-Instruct + +💾Datasets: https://huggingface.co/datasets/allenai/OLMoE-mix-0924",[],[],"[{'reaction': '👀', 'users': ['orrinin', 'YaTharThShaRma999', 'John6666', 'osanseviero', 'den0620', 'louisbrulenaudet'], 'count': 6}, {'reaction': '🚀', 'users': ['sequelbox'], 'count': 1}]",2024-09-04 15:48:41,2024-09-06 08:54:44.424,[],/posts/vilarin/317300660282714,1634,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png,159.0,Richard A Aragon,TuringsSolutions,914900735326223,"[{'type': 'text', 'value': ""The word 'Lead' has three definitions. When an LLM model tokenizes this word, it is always the same token. Imagine being able to put any particular embedding at any particular time into a 'Quantum State'. When an Embedding is in a Quantum State, the word token could have up to 3 different meanings (x1, x2, x3). The Quantum State gets collapsed based on the individual context surrounding the word. 'Jill lead Joy to the store' would collapse to x1. 'Jill and Joy stumbled upon a pile of lead' would collapse to x3. Very simple, right? 
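One way to sanity-check the intuition without any new machinery: contextual encoders already move the same surface token to different points in embedding space depending on the sentence. A small BERT probe (hypothetical sentences, and explicitly not the method from the video) makes the "collapse" visible as a similarity gap:

```python
import torch
from transformers import AutoModel, AutoTokenizer

tok = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModel.from_pretrained("bert-base-uncased")

def lead_vector(sentence: str) -> torch.Tensor:
    # Contextual embedding of the token "lead" inside this sentence
    inputs = tok(sentence, return_tensors="pt")
    with torch.no_grad():
        hidden = model(**inputs).last_hidden_state[0]
    idx = inputs.input_ids[0].tolist().index(tok.convert_tokens_to_ids("lead"))
    return hidden[idx]

verb  = lead_vector("Jill will lead Joy to the store.")
verb2 = lead_vector("The captain will lead the team onto the field.")
metal = lead_vector("They stumbled upon a pile of lead.")

cos = torch.nn.functional.cosine_similarity
print(cos(verb, verb2, dim=0))  # same sense: noticeably higher
print(cos(verb, metal, dim=0))  # different sense: noticeably lower
```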
This method produces OFF THE CHARTS results:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=tuQI6A-EOqE', 'raw': 'https://www.youtube.com/watch?v=tuQI6A-EOqE'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","The word 'Lead' has three definitions. When an LLM model tokenizes this word, it is always the same token. Imagine being able to put any particular embedding at any particular time into a 'Quantum State'. When an Embedding is in a Quantum State, the word token could have up to 3 different meanings (x1, x2, x3). The Quantum State gets collapsed based on the individual context surrounding the word. 'Jill lead Joy to the store' would collapse to x1. 'Jill and Joy stumbled upon a pile of lead' would collapse to x3. Very simple, right? This method produces OFF THE CHARTS results: + + +https://www.youtube.com/watch?v=tuQI6A-EOqE ",[],[],"[{'reaction': '🧠', 'users': ['maximuspowers', 'maier-s', 'nicolollo'], 'count': 3}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-08-29 21:39:19,2024-08-29 21:39:19.548,[],/posts/TuringsSolutions/914900735326223,1413,,0 +/avatars/a692e2e2a3b0222e2f8cdfc44ac8d64c.svg,29.0,its5Q,its5Q,230212031259808,"[{'type': 'text', 'value': 'Continuing my streak by releasing the Wikireading dataset: a large collection of scraped non-fiction books predominantly in Russian language.', 'raw': 'Continuing my streak by releasing the Wikireading dataset: a large collection of scraped non-fiction books predominantly in Russian language.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'its5Q/wikireading'}, 'url': 'https://huggingface.co/datasets/its5Q/wikireading', 'raw': 'https://huggingface.co/datasets/its5Q/wikireading'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's the highlights:"", 'raw': ""Here's the highlights:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ~7B tokens, or ~28B characters, making it a great candidate for use in pretraining', 'raw': '- ~7B tokens, or ~28B characters, making it a great candidate for use in pretraining'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Contains non-fiction works from many knowledge domains', 'raw': '- Contains non-fiction works from many knowledge domains'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Includes both the original HTML and extracted text of book chapters', 'raw': '- Includes both the original HTML and extracted text of book chapters'}]","Continuing my streak by releasing the Wikireading dataset: a large collection of scraped non-fiction books predominantly in Russian language. 
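A minimal way to poke at the release (the split name and streaming flag are assumptions here; the dataset card has the actual schema):

```python
from datasets import load_dataset

# Streaming avoids downloading ~28B characters up front
ds = load_dataset("its5Q/wikireading", split="train", streaming=True)
for row in ds.take(3):
    print(row.keys())
```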
+https://huggingface.co/datasets/its5Q/wikireading + +Here's the highlights: +- ~7B tokens, or ~28B characters, making it a great candidate for use in pretraining +- Contains non-fiction works from many knowledge domains +- Includes both the original HTML and extracted text of book chapters",[],[],"[{'reaction': '👍', 'users': ['lukmanaj', 'clem', 'kristaller486', 'nyuuzyou', 'C0d3B0dy'], 'count': 5}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '❤️', 'users': ['clem'], 'count': 1}]",2024-08-29 18:36:41,2024-08-29 18:36:41.732,[],/posts/its5Q/230212031259808,1501,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/651d4e73acd8e9168ac92b04/WMYCWKx9MM8Xxj8vXursD.png,,Jonah Ramponi,jonah-ramponi,672761214253429,"[{'type': 'text', 'value': 'Thought this was an interesting graphic from the EAGLE blog post. It made me wonder if certain sampling methods have been shown to work better for certain tasks.', 'raw': 'Thought this was an interesting graphic from the EAGLE blog post. It made me wonder if certain sampling methods have been shown to work better for certain tasks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Does anyone know of any work looking at trends in the output token probability distribution by task type? (or similar) ', 'raw': 'Does anyone know of any work looking at trends in the output token probability distribution by task type? (or similar) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Source: ', 'raw': 'Source: '}, {'type': 'link', 'href': 'https://sites.google.com/view/eagle-llm', 'raw': 'https://sites.google.com/view/eagle-llm'}]","Thought this was an interesting graphic from the EAGLE blog post. It made me wonder if certain sampling methods have been shown to work better for certain tasks. + +Does anyone know of any work looking at trends in the output token probability distribution by task type? (or similar) + +Source: https://sites.google.com/view/eagle-llm","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/651d4e73acd8e9168ac92b04/775TUAesRzcshWIVKmo_G.png'}]",[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-08-29 18:06:07,2024-08-29 18:06:44.888,[],/posts/jonah-ramponi/672761214253429,502,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6455cc8d679315e4ef16fbec/M6Cfifn05BUzkCFd2QDIT.png,159.0,Tim Dolan,macadeliccc,858442795091051,"[{'type': 'text', 'value': 'Automated web scraping with playwright is becoming easier by the day. Now, using ollama tool calling, its possible to perform very high accuracy web scraping (in some cases 100% accurate) through just asking an LLM to scrape the content for you. ', 'raw': 'Automated web scraping with playwright is becoming easier by the day. Now, using ollama tool calling, its possible to perform very high accuracy web scraping (in some cases 100% accurate) through just asking an LLM to scrape the content for you. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This can be completed in a multistep process similar to cohere's platform. If you have tried the cohere playground with web scraping, this will feel very similar. In my experience, the Llama 3.1 version is much better due to the larger context window. Both tools are great, but the difference is the ollama + playwright version is completely controlled by you. 
"", 'raw': ""This can be completed in a multistep process similar to cohere's platform. If you have tried the cohere playground with web scraping, this will feel very similar. In my experience, the Llama 3.1 version is much better due to the larger context window. Both tools are great, but the difference is the ollama + playwright version is completely controlled by you. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All you need to do is wrap your scraper in a function:', 'raw': 'All you need to do is wrap your scraper in a function:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ' async def query_web_scraper(url: str) -> dict:\n scraper = WebScraper(headless=False)\n return await scraper.query_page_content(url)', 'raw': '```\n async def query_web_scraper(url: str) -> dict:\n scraper = WebScraper(headless=False)\n return await scraper.query_page_content(url)\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and then make your request:', 'raw': 'and then make your request:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ""# First API call: Send the query and function description to the model\nresponse = ollama.chat(\n model=model,\n messages=messages,\n tools=[\n {\n 'type': 'function',\n 'function': {\n 'name': 'query_web_scraper',\n 'description': 'Scrapes the content of a web page and returns the structured JSON object with titles, articles, and associated links.',\n 'parameters': {\n 'type': 'object',\n 'properties': {\n 'url': {\n 'type': 'string',\n 'description': 'The URL of the web page to scrape.',\n },\n },\n 'required': ['url'],\n },\n },\n },\n ]\n)"", 'raw': ""```\n# First API call: Send the query and function description to the model\nresponse = ollama.chat(\n model=model,\n messages=messages,\n tools=[\n {\n 'type': 'function',\n 'function': {\n 'name': 'query_web_scraper',\n 'description': 'Scrapes the content of a web page and returns the structured JSON object with titles, articles, and associated links.',\n 'parameters': {\n 'type': 'object',\n 'properties': {\n 'url': {\n 'type': 'string',\n 'description': 'The URL of the web page to scrape.',\n },\n },\n 'required': ['url'],\n },\n },\n },\n ]\n)\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To learn more:', 'raw': 'To learn more:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Github w/ Playground: ', 'raw': 'Github w/ Playground: '}, {'type': 'link', 'href': 'https://github.com/tdolan21/tool-calling-playground/blob/main/notebooks/ollama-playwright-web-scraping.ipynb', 'raw': 'https://github.com/tdolan21/tool-calling-playground/blob/main/notebooks/ollama-playwright-web-scraping.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Complete Guide: ', 'raw': 'Complete Guide: '}, {'type': 'link', 'href': 'https://medium.com/@tdolan21/building-an-llm-powered-web-scraper-with-ollama-and-playwright-6274d5d938b5', 'raw': 'https://medium.com/@tdolan21/building-an-llm-powered-web-scraper-with-ollama-and-playwright-6274d5d938b5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Automated web scraping with playwright is becoming easier by the day. 
Now, using ollama tool calling, its possible to perform very high accuracy web scraping (in some cases 100% accurate) through just asking an LLM to scrape the content for you. + +This can be completed in a multistep process similar to cohere's platform. If you have tried the cohere playground with web scraping, this will feel very similar. In my experience, the Llama 3.1 version is much better due to the larger context window. Both tools are great, but the difference is the ollama + playwright version is completely controlled by you. + +All you need to do is wrap your scraper in a function: + +``` + async def query_web_scraper(url: str) -> dict: + scraper = WebScraper(headless=False) + return await scraper.query_page_content(url) +``` + +and then make your request: + +``` +# First API call: Send the query and function description to the model +response = ollama.chat( + model=model, + messages=messages, + tools=[ + { + 'type': 'function', + 'function': { + 'name': 'query_web_scraper', + 'description': 'Scrapes the content of a web page and returns the structured JSON object with titles, articles, and associated links.', + 'parameters': { + 'type': 'object', + 'properties': { + 'url': { + 'type': 'string', + 'description': 'The URL of the web page to scrape.', + }, + }, + 'required': ['url'], + }, + }, + }, + ] +) +``` + +To learn more: +Github w/ Playground: https://github.com/tdolan21/tool-calling-playground/blob/main/notebooks/ollama-playwright-web-scraping.ipynb +Complete Guide: https://medium.com/@tdolan21/building-an-llm-powered-web-scraper-with-ollama-and-playwright-6274d5d938b5 + +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6455cc8d679315e4ef16fbec/hVNJY2mBa3mNtCXWFGaKf.mp4'}]",[],"[{'reaction': '👍', 'users': ['RalFinger', 'xsa-dev', 'wsuff', 'alielfilali01', 'Bruhn'], 'count': 5}, {'reaction': '👀', 'users': ['John6666', 'alielfilali01', 'louisbrulenaudet'], 'count': 3}]",2024-08-29 16:24:10,2024-08-29 18:52:11.663,[],/posts/macadeliccc/858442795091051,2132,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,676807179049479,"[{'type': 'text', 'value': 'Simplified implementation of “Neural Networks are Decision Trees”.', 'raw': 'Simplified implementation of “Neural Networks are Decision Trees”.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Showing that any neural network with any activation function can be represented as a decision tree. Since decision trees are inherently interpretable, their equivalence helps us understand how the network makes decisions.', 'raw': 'Showing that any neural network with any activation function can be represented as a decision tree. 
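The equivalence is easiest to see for a one-hidden-layer ReLU network: every on/off pattern of the hidden units is one root-to-leaf path, and inside that region the network is exactly linear. A small numpy sketch of that core identity (illustrative only, not the notebook's code):

```python
import numpy as np

rng = np.random.default_rng(0)
W1, b1 = rng.normal(size=(8, 2)), rng.normal(size=8)   # hidden layer (8 ReLU units)
W2, b2 = rng.normal(size=(1, 8)), rng.normal(size=1)   # output layer

def leaf_rule(x):
    pre = W1 @ x + b1
    mask = (pre > 0).astype(float)        # the tree's decisions along this path
    W_eff = W2 @ (W1 * mask[:, None])     # exact linear map on this region
    b_eff = W2 @ (b1 * mask) + b2
    return mask, W_eff, b_eff

x = np.array([0.3, -0.7])
mask, W_eff, b_eff = leaf_rule(x)
# The leaf's linear rule reproduces the network output exactly
assert np.allclose(W_eff @ x + b_eff,
                   W2 @ np.maximum(W1 @ x + b1, 0) + b2)
```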
Since decision trees are inherently interpretable, their equivalence helps us understand how the network makes decisions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this implementation, we trained a simple neural network for 1k epochs on make_moons, saved the trained weights (state dicts), extracted the decision tree equivalent from the trained weights, and then visualized and evaluated it.', 'raw': 'In this implementation, we trained a simple neural network for 1k epochs on make_moons, saved the trained weights (state dicts), extracted the decision tree equivalent from the trained weights, and then visualized and evaluated it.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'link', 'href': 'https://github.com/Jaykef/ai-algorithms/blob/main/nns_are%20decision_trees.ipynb', 'raw': 'https://github.com/Jaykef/ai-algorithms/blob/main/nns_are%20decision_trees.ipynb'}]","Simplified implementation of “Neural Networks are Decision Trees”. + +Showing that any neural network with any activation function can be represented as a decision tree. Since decision trees are inherently interpretable, their equivalence helps us understand how the network makes decisions. + +In this implementation, we trained a simple neural network for 1k epochs on make_moons, saved the trained weights (state dicts), extracted the decision tree equivalent from the trained weights, and then visualized and evaluated it. + +Code: https://github.com/Jaykef/ai-algorithms/blob/main/nns_are%20decision_trees.ipynb","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/6nh24JDkHq7mrrImKoPkz.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/DIk59sBCsWQKZsHIPREMA.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/hMMlU3QzSO7ELzoU0Kre2.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/2YXz5KFV8-OAhdreRFciY.png'}]",[],"[{'reaction': '🧠', 'users': ['prithivMLmods', 'John6666', 'maier-s', 'jsulz', 'AtAndDev'], 'count': 5}, {'reaction': '🔥', 'users': ['rajveer43', 'AtAndDev'], 'count': 2}]",2024-08-29 14:44:28,2024-08-29 21:52:51.847,"[{'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}]",/posts/Jaward/676807179049479,1336,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png,1076.0,John Smith,John6666,325819090005748,"[{'type': 'mention', 'user': 'victor', 'raw': '@victor'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Excuse me.', 'raw': 'Excuse me.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I would like to report the following bug or new specification that is probably the cause of the fatal stacks that are occurring in the Zero GPU space throughout HF.', 'raw': 'I would like to report the following bug or new specification that is probably the cause of the fatal stacks that are occurring in the Zero GPU space throughout HF.'}, 
{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks.', 'raw': 'Thanks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'zero-gpu-explorers/README', 'discussionNum': 104}, 'url': 'https://huggingface.co/spaces/zero-gpu-explorers/README/discussions/104', 'raw': 'https://huggingface.co/spaces/zero-gpu-explorers/README/discussions/104'}]","@victor + +Excuse me. +I would like to report the following bug or new specification that is probably the cause of the fatal stacks that are occurring in the Zero GPU space throughout HF. +Thanks. + +https://huggingface.co/spaces/zero-gpu-explorers/README/discussions/104",[],"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949}]","[{'reaction': '👀', 'users': ['victor'], 'count': 1}]",2024-08-29 13:56:20,2024-09-06 14:59:07.538,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/John6666/325819090005748,2821,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,396620243063150,"[{'type': 'text', 'value': 'NVIDIA just dropped NVEagle 🦅', 'raw': 'NVIDIA just dropped NVEagle 🦅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Super impressive vision language model that comes in 7B, 13B and 13B fine-tuned on chat 💬', 'raw': 'Super impressive vision language model that comes in 7B, 13B and 13B fine-tuned on chat 💬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model repositories: ', 'raw': 'Model repositories: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/nveagle-66d0705108582d73bb235c26'}, 'url': 'https://huggingface.co/collections/merve/nveagle-66d0705108582d73bb235c26', 'raw': 'https://huggingface.co/collections/merve/nveagle-66d0705108582d73bb235c26'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it: ', 'raw': 'Try it: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'NVEagle/Eagle-X5-13B-Chat'}, 'url': 'https://huggingface.co/spaces/NVEagle/Eagle-X5-13B-Chat', 'raw': 'https://huggingface.co/spaces/NVEagle/Eagle-X5-13B-Chat'}, {'type': 'text', 'value': ' 💬 (works very well! 🤯)', 'raw': ' 💬 (works very well! 🤯)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This model essentially explores having different experts (MoE) for image encoder part of vision language model. 
', 'raw': 'This model essentially explores having different experts (MoE) for image encoder part of vision language model. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How? 🧐', 'raw': 'How? 🧐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The authors concatenate the vision encoder output tokens together, and they apply ""pre-alignment"": essentially, they fine-tune the experts with a frozen text encoder. ', 'raw': 'The authors concatenate the vision encoder output tokens together, and they apply ""pre-alignment"": essentially, they fine-tune the experts with a frozen text encoder. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then they freeze both experts and the decoder and just train the projection layer, and finally, they unfreeze everything for supervised fine-tuning ✨', 'raw': 'Then they freeze both experts and the decoder and just train the projection layer, and finally, they unfreeze everything for supervised fine-tuning ✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the paper, they explore different fusion strategies and vision encoders, extending the basic CLIP encoder, and find that simply concatenating visual tokens works well.', 'raw': 'In the paper, they explore different fusion strategies and vision encoders, extending the basic CLIP encoder, and find that simply concatenating visual tokens works well.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The rest of the architecture is quite similar to LLaVA. (see below the architecture) ', 'raw': 'The rest of the architecture is quite similar to LLaVA. (see below the architecture) '}]","NVIDIA just dropped NVEagle 🦅 + +Super impressive vision language model that comes in 7B, 13B and 13B fine-tuned on chat 💬 +Model repositories: https://huggingface.co/collections/merve/nveagle-66d0705108582d73bb235c26 +Try it: https://huggingface.co/spaces/NVEagle/Eagle-X5-13B-Chat 💬 (works very well! 🤯) + +This model essentially explores having different experts (MoE) for image encoder part of vision language model. +How? 🧐 +The authors concatenate the vision encoder output tokens together, and they apply ""pre-alignment"": essentially, they fine-tune the experts with a frozen text encoder. + +Then they freeze both experts and the decoder and just train the projection layer, and finally, they unfreeze everything for supervised fine-tuning ✨ + +In the paper, they explore different fusion strategies and vision encoders, extending the basic CLIP encoder, and find that simply concatenating visual tokens works well. +The rest of the architecture is quite similar to LLaVA. 
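The fusion step the authors land on is easy to picture in code. A hypothetical torch sketch (dimensions invented for illustration; this is not NVIDIA's implementation): run each vision expert, concatenate their tokens along the channel axis, and project into the decoder's embedding space.

```python
import torch
import torch.nn as nn

B, N = 2, 576                              # batch size, visual tokens per encoder
clip_tokens  = torch.randn(B, N, 1024)     # e.g. CLIP expert features
extra_tokens = torch.randn(B, N, 768)      # e.g. a second expert's features

fused = torch.cat([clip_tokens, extra_tokens], dim=-1)   # (B, N, 1792)
projector = nn.Linear(fused.shape[-1], 4096)             # 4096 ~ LLM hidden size
visual_embeds = projector(fused)           # ready to prepend to text embeddings
```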
(see below the architecture) ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/jOo9XXc-t6CNA07Aca83n.png'}]",[],"[{'reaction': '👍', 'users': ['sagar007', 'Kukedlc', 'John6666', 'khanhnamle1994', 'maximuspowers', 'alielfilali01', 'Arakinas', 'darkzbaron', 'damerajee', 'Tahahah', 'andito', 'noobmldude'], 'count': 12}, {'reaction': '🚀', 'users': ['damerajee', 'Tahahah', 'louisbrulenaudet', 'andito'], 'count': 4}]",2024-08-29 13:28:54,2024-08-29 13:28:54.254,[],/posts/merve/396620243063150,2407,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,644265186134493,"[{'type': 'text', 'value': ' 📫 A packed AI in the News edition today!', 'raw': ' 📫 A packed AI in the News edition today!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""📉 Nvidia Revenue Jumps 122% in Positive Sign for Tech's A.I. Boom - NYT"", 'raw': ""📉 Nvidia Revenue Jumps 122% in Positive Sign for Tech's A.I. Boom - NYT""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- $30.04 billion revenue, $16.95 billion net income (up from $6.19 billion a year ago)', 'raw': '- $30.04 billion revenue, $16.95 billion net income (up from $6.19 billion a year ago)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Shares in the company fell by as much as 7% in after-hours trading', 'raw': '- Shares in the company fell by as much as 7% in after-hours trading'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Nvidia faces production challenges with its new Blackwell chip and growing competition, including from its own customers', 'raw': '- Nvidia faces production challenges with its new Blackwell chip and growing competition, including from its own customers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Spending on data centers and energy costs to support A.I. is expected to be $1 trillion', 'raw': '- Spending on data centers and energy costs to support A.I. is expected to be $1 trillion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://www.nytimes.com/2024/08/28/technology/nvidia-earnings-ai-stocks.html', 'raw': 'https://www.nytimes.com/2024/08/28/technology/nvidia-earnings-ai-stocks.html'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏛️ California Legislature Approves Bill Proposing Sweeping A.I. Restrictions - NYT', 'raw': '🏛️ California Legislature Approves Bill Proposing Sweeping A.I. Restrictions - NYT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Bill S.B. 1047 would require AI companies to test their systems for safety before public release and allow the state attorney general to sue for serious harms caused by AI.', 'raw': '- Bill S.B. 
1047 would require AI companies to test their systems for safety before public release and allow the state attorney general to sue for serious harms caused by AI.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Supporters argue it’s necessary to mitigate AI risks, while critics worry it’s excessively focused on catastrophic harms and could jeopardize open-source AI development.', 'raw': '- Supporters argue it’s necessary to mitigate AI risks, while critics worry it’s excessively focused on catastrophic harms and could jeopardize open-source AI development.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Governor Gavin Newsom has until September 30 to decide on the bill, which could set a national standard for AI regulation if signed into law.', 'raw': '- Governor Gavin Newsom has until September 30 to decide on the bill, which could set a national standard for AI regulation if signed into law.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://www.nytimes.com/2024/08/28/technology/california-ai-safety-bill.html', 'raw': 'https://www.nytimes.com/2024/08/28/technology/california-ai-safety-bill.html'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧑\u200d🏫 Generative AI Transformed English Homework. Math Is Next', 'raw': '🧑\u200d🏫 Generative AI Transformed English Homework. Math Is Next'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Gauth app, which can solve math problems from photos, has millions of downloads', 'raw': '- Gauth app, which can solve math problems from photos, has millions of downloads'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Got a low B in high-school level algebra and geometry in tests by Wired. ""Likely good enough to satisfy bored students who\'d rather spend their time after school doing literally anything else.""', 'raw': '- Got a low B in high-school level algebra and geometry in tests by Wired. ""Likely good enough to satisfy bored students who\'d rather spend their time after school doing literally anything else.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The rise of such AI tools challenges educators to rethink their approach to math homework and teaching methods, possibly leading to a shift towards more in-class practice and personalized learning.', 'raw': '- The rise of such AI tools challenges educators to rethink their approach to math homework and teaching methods, possibly leading to a shift towards more in-class practice and personalized learning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://www.wired.com/story/gauth-ai-math-homework-app/', 'raw': 'https://www.wired.com/story/gauth-ai-math-homework-app/'}]"," 📫 A packed AI in the News edition today! + +📉 Nvidia Revenue Jumps 122% in Positive Sign for Tech's A.I. Boom - NYT +- $30.04 billion revenue, $16.95 billion net income (up from $6.19 billion a year ago) +- Shares in the company fell by as much as 7% in after-hours trading +- Nvidia faces production challenges with its new Blackwell chip and growing competition, including from its own customers +- Spending on data centers and energy costs to support A.I. is expected to be $1 trillion +👉 https://www.nytimes.com/2024/08/28/technology/nvidia-earnings-ai-stocks.html + +🏛️ California Legislature Approves Bill Proposing Sweeping A.I. Restrictions - NYT +- Bill S.B. 
1047 would require AI companies to test their systems for safety before public release and allow the state attorney general to sue for serious harms caused by AI. +- Supporters argue it’s necessary to mitigate AI risks, while critics worry it’s excessively focused on catastrophic harms and could jeopardize open-source AI development. +- Governor Gavin Newsom has until September 30 to decide on the bill, which could set a national standard for AI regulation if signed into law. +👉 https://www.nytimes.com/2024/08/28/technology/california-ai-safety-bill.html + +🧑‍🏫 Generative AI Transformed English Homework. Math Is Next +- Gauth app, which can solve math problems from photos, has millions of downloads +- Got a low B in high-school level algebra and geometry in tests by Wired. ""Likely good enough to satisfy bored students who'd rather spend their time after school doing literally anything else."" +- The rise of such AI tools challenges educators to rethink their approach to math homework and teaching methods, possibly leading to a shift towards more in-class practice and personalized learning. +👉 https://www.wired.com/story/gauth-ai-math-homework-app/",[],[],"[{'reaction': '👍', 'users': ['victor', 'John6666', 'JackCloudman', 'louisbrulenaudet'], 'count': 4}]",2024-08-29 13:16:18,2024-08-29 13:16:18.419,[],/posts/fdaudens/644265186134493,868,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,304767116384171,"[{'type': 'text', 'value': 'Very cool to see more and more amazing startups like ', 'raw': 'Very cool to see more and more amazing startups like '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'PrunaAI'}, 'url': 'https://huggingface.co/PrunaAI', 'raw': 'https://huggingface.co/PrunaAI', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/653772fed7616b72795db894/1GPjjeZP_RGRl1pYgFdZc.png'}, {'type': 'text', 'value': ' relying on Hugging Face to get more visibility, distribution and usage!', 'raw': ' relying on Hugging Face to get more visibility, distribution and usage!'}]","Very cool to see more and more amazing startups like https://huggingface.co/PrunaAI relying on Hugging Face to get more visibility, distribution and usage!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/uq-XlINaBQX1FO3gzjCSZ.png'}]",[],"[{'reaction': '❤️', 'users': ['alielfilali01', 'Omarito2412', 'louisbrulenaudet'], 'count': 3}, {'reaction': '👀', 'users': ['John6666', 'victor'], 'count': 2}, {'reaction': '🤗', 'users': ['jnh-ordbogen', 'prithivMLmods'], 'count': 2}]",2024-08-29 11:39:47,2024-12-02 08:32:16.392,"[{'_id': '645e2ecce4504cd77ca14016', 'avatarUrl': '/avatars/2f7a1cfc68e6f5c0a7ddb323d2ffd252.svg', 'fullname': 'Mads', 'name': 'mhenrichsen', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 49, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}, {'_id': '6273f303f6d63a28483fde12', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1659336880158-6273f303f6d63a28483fde12.png', 'fullname': 'Lucain Pouget', 'name': 'Wauplin', 'type': 'user', 'isPro': True, 
'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 231, 'isFollowing': False}]",/posts/clem/304767116384171,1788,,7 +https://cdn-avatars.huggingface.co/v1/production/uploads/626505d493e0b04d75710566/9rfJc9ORXU9J5a42Ev3v6.png,118.0,Stefano Fiorucci,anakin87,981999224157727,"[{'type': 'text', 'value': '💬 🇮🇹 Phi 3.5 mini ITA: a Small Language Model for Italian', 'raw': '💬 🇮🇹 Phi 3.5 mini ITA: a Small Language Model for Italian'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Lately, I've spent some time fine-tuning language models."", 'raw': ""Lately, I've spent some time fine-tuning language models.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Now I am happy to release Phi 3.5 mini ITA: a fine-tuned version of Phi-3.5-mini-instruct to improve performance on the Italian language', 'raw': 'Now I am happy to release Phi 3.5 mini ITA: a fine-tuned version of Phi-3.5-mini-instruct to improve performance on the Italian language'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Small (3.82 B parameters) but capable model', 'raw': '🔹 Small (3.82 B parameters) but capable model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 128k context length', 'raw': '🔹 128k context length'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Chat with it on 🤗 Spaces: ', 'raw': 'Chat with it on 🤗 Spaces: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'anakin87/Phi-3.5-mini-ITA'}, 'url': 'https://huggingface.co/spaces/anakin87/Phi-3.5-mini-ITA', 'raw': 'https://huggingface.co/spaces/anakin87/Phi-3.5-mini-ITA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model card: ', 'raw': 'Model card: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'anakin87/Phi-3.5-mini-ITA'}, 'url': 'https://huggingface.co/anakin87/Phi-3.5-mini-ITA', 'raw': 'https://huggingface.co/anakin87/Phi-3.5-mini-ITA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗃️ Data', 'raw': '🗃️ Data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Supervised fine-tuning using a good mix of English and Italian data:', 'raw': 'Supervised fine-tuning using a good mix of English and Italian data:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'mlabonne/FineTome-100k'}, 'url': 'https://huggingface.co/datasets/mlabonne/FineTome-100k', 'raw': 'https://huggingface.co/datasets/mlabonne/FineTome-100k'}, {'type': 'text', 'value': ' by ', 'raw': ' by '}, {'type': 'mention', 'user': 'mlabonne', 'raw': '@mlabonne'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'efederici/capybara-claude-15k-ita'}, 'url': 'https://huggingface.co/datasets/efederici/capybara-claude-15k-ita', 'raw': 'https://huggingface.co/datasets/efederici/capybara-claude-15k-ita'}, {'type': 'text', 'value': ' by ', 'raw': ' by '}, {'type': 'mention', 'user': 'efederici', 'raw': '@efederici'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🙏 
Thanks to the authors for the datasets.', 'raw': '🙏 Thanks to the authors for the datasets.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Targeted training with Spectrum', 'raw': '🎯 Targeted training with Spectrum'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I used Spectrum, a relatively new technique for parameter-efficient learning.', 'raw': 'I used Spectrum, a relatively new technique for parameter-efficient learning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The idea is to train only the layers of the model with high Signal-to-Noise Ratio (SNR) and ❄️ freeze the rest.', 'raw': 'The idea is to train only the layers of the model with high Signal-to-Noise Ratio (SNR) and ❄️ freeze the rest.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I trained the top 30% of model layers.', 'raw': 'I trained the top 30% of model layers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Spectrum paper: ', 'raw': '📝 Spectrum paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2406.06623', 'raw': 'https://arxiv.org/abs/2406.06623'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Vibe check and performance on Italian benchmarks seem encouraging', 'raw': '📊 Vibe check and performance on Italian benchmarks seem encouraging'}]","💬 🇮🇹 Phi 3.5 mini ITA: a Small Language Model for Italian + +Lately, I've spent some time fine-tuning language models. + +Now I am happy to release Phi 3.5 mini ITA: a fine-tuned version of Phi-3.5-mini-instruct to improve performance on the Italian language + +🔹 Small (3.82 B parameters) but capable model +🔹 128k context length + +Chat with it on 🤗 Spaces: https://huggingface.co/spaces/anakin87/Phi-3.5-mini-ITA +Model card: https://huggingface.co/anakin87/Phi-3.5-mini-ITA + +🗃️ Data +Supervised fine-tuning using a good mix of English and Italian data: +- https://huggingface.co/datasets/mlabonne/FineTome-100k by @mlabonne +- https://huggingface.co/datasets/efederici/capybara-claude-15k-ita by @efederici +🙏 Thanks to the authors for the datasets. + + +🎯 Targeted training with Spectrum +I used Spectrum, a relatively new technique for parameter-efficient learning. +The idea is to train only the layers of the model with high Signal-to-Noise Ratio (SNR) and ❄️ freeze the rest. +I trained the top 30% of model layers. 
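Mechanically, Spectrum-style training boils down to a requires_grad mask over whichever modules the SNR scan selects. A sketch with a hand-picked stand-in list (the real Spectrum tooling emits the module list per model; the layer names below follow the Phi-3 naming in transformers):

```python
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("microsoft/Phi-3.5-mini-instruct")

# Stand-in for the high-SNR modules Spectrum would pick (~top 30% of 32 layers)
targets = [f"model.layers.{i}." for i in range(22, 32)]

for name, param in model.named_parameters():
    param.requires_grad = any(name.startswith(t) for t in targets)

trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
total = sum(p.numel() for p in model.parameters())
print(f"training {trainable / total:.1%} of parameters")
```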
+ +📝 Spectrum paper: https://arxiv.org/abs/2406.06623 + + +📊 Vibe check and performance on Italian benchmarks seem encouraging","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626505d493e0b04d75710566/juN6pOZ4lyQrdjKS4eU1P.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626505d493e0b04d75710566/Tkkij8hfXgg65vtkjBdiV.png'}]","[{'_id': '612246596d9ce900691744d2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/612246596d9ce900691744d2/9DlHVQDqblKz7QPTA6nDa.jpeg', 'fullname': 'Edoardo Federici', 'name': 'efederici', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 44}, {'_id': '61b8e2ba285851687028d395', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/Rq3xWG7mJ3aCRoBsq340h.jpeg', 'fullname': 'Maxime Labonne', 'name': 'mlabonne', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5406}]","[{'reaction': '👍', 'users': ['mlabonne', 'John6666', 'osanseviero', 'victor', 'merve', 'kristaller486'], 'count': 6}, {'reaction': '🔥', 'users': ['lukecage', '4rtemi5'], 'count': 2}]",2024-08-29 10:17:17,2024-08-29 10:30:16.856,"[{'_id': '626505d493e0b04d75710566', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626505d493e0b04d75710566/9rfJc9ORXU9J5a42Ev3v6.png', 'fullname': 'Stefano Fiorucci', 'name': 'anakin87', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 118, 'isFollowing': False}, {'_id': '61b8e2ba285851687028d395', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b8e2ba285851687028d395/Rq3xWG7mJ3aCRoBsq340h.jpeg', 'fullname': 'Maxime Labonne', 'name': 'mlabonne', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5406, 'isFollowing': False}]",/posts/anakin87/981999224157727,1693,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/65ddc2dd978ab6624db537f6/ZbI4M2Srg3Pbhoi_DLUpV.jpeg,1.0,Sharhabeel Hamdan,hamdanuk,626373388165707,"[{'type': 'text', 'value': ' Looking for Generative AI trainer/speaker for AI accelerator program (Virtual/Online sessions).', 'raw': ' Looking for Generative AI trainer/speaker for AI accelerator program (Virtual/Online sessions).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To get more context about the program, please visit the program landing page: ', 'raw': 'To get more context about the program, please visit the program landing page: '}, {'type': 'link', 'href': 'https://llamadesigndrive.com', 'raw': 'https://llamadesigndrive.com'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you are interested, reach out at sharhabeel@startupbootcamp.org', 'raw': 'If you are interested, reach out at sharhabeel@startupbootcamp.org'}]"," Looking for Generative AI trainer/speaker for AI accelerator program (Virtual/Online sessions). 
+ +To get more context about the program, please visit the program landing page: https://llamadesigndrive.com + +If you are interested, reach out at sharhabeel@startupbootcamp.org",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-08-24 11:48:50,2024-08-24 16:38:52.177,"[{'_id': '66ca081fed5c5babde69b8c3', 'avatarUrl': '/avatars/7c96117109dd076ab802eeeb2c090a71.svg', 'fullname': 'Matthew egbenede ogheneroro ', 'name': 'Martech', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/hamdanuk/626373388165707,452,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/600ae38cc92b79f54efd4556/cSqRIslYl5L3I4WK3a31f.png,83.0,Hieu Lam,lamhieu,523508059853017,"[{'type': 'text', 'value': '🎯 Ghost 8B Beta 1608: Empowering Your AI Assistant', 'raw': '🎯 Ghost 8B Beta 1608: Empowering Your AI Assistant'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📦 Unlock the Power of Ghost 8B Beta 1608: Build Your Personal AI Companion', 'raw': '📦 Unlock the Power of Ghost 8B Beta 1608: Build Your Personal AI Companion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Ghost 8B Beta 1608 empowers you to create a safe and multilingual AI assistant tailored to your needs, directly on your personal computer. 🧑\u200d💻 Leverage AI's capabilities within your own space! 🚀 Ghost 8B Beta 1608 is ready to become your AI companion."", 'raw': ""Ghost 8B Beta 1608 empowers you to create a safe and multilingual AI assistant tailored to your needs, directly on your personal computer. 🧑\u200d💻 Leverage AI's capabilities within your own space! 🚀 Ghost 8B Beta 1608 is ready to become your AI companion.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '~', 'raw': '~'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📦 Use Ghost 8B Beta 1608 as your personal AI assistant!', 'raw': '📦 Use Ghost 8B Beta 1608 as your personal AI assistant!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With Ghost 8B Beta 1608, you can harness the power of AI to build your own AI assistant that provides safe and personalized language support. 🧑\u200d💻 Enjoy the benefits of AI on your personal computer! 🚀 Ghost 8B Beta 1608 is ready to become your AI partner.', 'raw': 'With Ghost 8B Beta 1608, you can harness the power of AI to build your own AI assistant that provides safe and personalized language support. 🧑\u200d💻 Enjoy the benefits of AI on your personal computer! 🚀 Ghost 8B Beta 1608 is ready to become your AI partner.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'lamhieu/ghost-8b-beta-8k'}, 'url': 'https://huggingface.co/spaces/lamhieu/ghost-8b-beta-8k', 'raw': 'https://huggingface.co/spaces/lamhieu/ghost-8b-beta-8k'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'ghost-x/ghost-8b-beta-668ead6179f93be717db4542'}, 'url': 'https://huggingface.co/collections/ghost-x/ghost-8b-beta-668ead6179f93be717db4542', 'raw': 'https://huggingface.co/collections/ghost-x/ghost-8b-beta-668ead6179f93be717db4542'}, {'type': 'new_line', 'raw': '\n'}]","🎯 Ghost 8B Beta 1608: Empowering Your AI Assistant +📦 Unlock the Power of Ghost 8B Beta 1608: Build Your Personal AI Companion +Ghost 8B Beta 1608 empowers you to create a safe and multilingual AI assistant tailored to your needs, directly on your personal computer. 🧑‍💻 Leverage AI's capabilities within your own space! 🚀 Ghost 8B Beta 1608 is ready to become your AI companion. +~ +📦 Use Ghost 8B Beta 1608 as your personal AI assistant! +With Ghost 8B Beta 1608, you can harness the power of AI to build your own AI assistant that provides safe and personalized language support. 
🧑‍💻 Enjoy the benefits of AI on your personal computer! 🚀 Ghost 8B Beta 1608 is ready to become your AI partner. +https://huggingface.co/spaces/lamhieu/ghost-8b-beta-8k +https://huggingface.co/collections/ghost-x/ghost-8b-beta-668ead6179f93be717db4542 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/600ae38cc92b79f54efd4556/QseNBSRZxePf-RtXqXA83.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'ecyht2'], 'count': 2}, {'reaction': '🔥', 'users': ['lrq3000', 'KingNish'], 'count': 2}, {'reaction': '👍', 'users': ['brainhome'], 'count': 1}]",2024-08-24 11:42:45,2024-08-24 11:58:01.502,[],/posts/lamhieu/523508059853017,1760,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg,1222.0,Nishith Jain,KingNish,427716208274372,"[{'type': 'text', 'value': 'Introducing Voicee, a superfast voice assistant.', 'raw': 'Introducing Voicee, a superfast voice assistant.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'KingNish/Voicee'}, 'url': 'https://huggingface.co/spaces/KingNish/Voicee', 'raw': 'https://huggingface.co/spaces/KingNish/Voicee'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It achieved latency <500 ms.', 'raw': 'It achieved latency <500 ms.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Its average latency is 700 ms.', 'raw': 'Its average latency is 700 ms.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It works best in Google Chrome.', 'raw': 'It works best in Google Chrome.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Please try it and give your feedback.', 'raw': 'Please try it and give your feedback.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thank you. 🤗', 'raw': 'Thank you. 🤗'}]","Introducing Voicee, a superfast voice assistant. +https://huggingface.co/spaces/KingNish/Voicee +It achieved latency <500 ms. +Its average latency is 700 ms. +It works best in Google Chrome. +Please try it and give your feedback. +Thank you. 
🤗",[],[],"[{'reaction': '🔥', 'users': ['Bruhn', 'John6666', 'm-ric', 'prithivMLmods', 'hamedj', 'ajibawa-2023', 'osanseviero', 'victor'], 'count': 8}, {'reaction': '👀', 'users': ['John6666', 'ParthSadaria'], 'count': 2}]",2024-08-24 10:56:33,2024-09-08 13:01:04.528,"[{'_id': '656e3808d4de03a07d116850', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg', 'fullname': 'Kenneth Hamilton', 'name': 'ZennyKenny', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 77, 'isFollowing': False}, {'_id': '66d0bb3d13440f78268b701d', 'avatarUrl': '/avatars/3b6849dd6733cb8d110a795cbebe9bce.svg', 'fullname': 'Anton Rifco', 'name': 'rifco', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '6591cd2350d39af7f43b2cc9', 'avatarUrl': '/avatars/bd5045f69d256bfb530169f6f6c9796c.svg', 'fullname': 'Brian Hassan', 'name': 'Anon61Iam', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/KingNish/427716208274372,3605,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,223181170945582,"[{'type': 'text', 'value': 'Alan Turing\'s mind-bender: ""Can machines think?"" in its glorified form. This 74yr old paper laid the foundation for how we think about AI and machine intelligence today. The level of detail, clarity and foresight is just phenomenal - he was way ahead of his time 🧠🤖', 'raw': 'Alan Turing\'s mind-bender: ""Can machines think?"" in its glorified form. This 74yr old paper laid the foundation for how we think about AI and machine intelligence today. The level of detail, clarity and foresight is just phenomenal - he was way ahead of his time 🧠🤖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Original copy: ', 'raw': 'Original copy: '}, {'type': 'link', 'href': 'https://archive.org/details/MIND--COMPUTING-MACHINERY-AND-INTELLIGENCE', 'raw': 'https://archive.org/details/MIND--COMPUTING-MACHINERY-AND-INTELLIGENCE'}]","Alan Turing's mind-bender: ""Can machines think?"" in its glorified form. This 74yr old paper laid the foundation for how we think about AI and machine intelligence today. 
The level of detail, clarity and foresight is just phenomenal - he was way ahead of his time 🧠🤖 + +Original copy: https://archive.org/details/MIND--COMPUTING-MACHINERY-AND-INTELLIGENCE","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/tdIi4quSXarCOTl-K3tb8.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/IikVjILH90eLYTzBSJ3En.png'}]",[],"[{'reaction': '👀', 'users': ['John6666', 'm-ric', 'edison1'], 'count': 3}, {'reaction': '👍', 'users': ['dashfunnydashdash', 'KokuJP', 'dark-pen'], 'count': 3}]",2024-08-24 01:01:39,2024-08-24 01:01:39.714,[],/posts/Jaward/223181170945582,1487,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/643ac5d2e2b979ae6144d68c/Z7PCNopn4cQeAYnVJDoqG.png,244.0,nyuuzyou,nyuuzyou,515587584811622,"[{'type': 'text', 'value': '🌐 Check out the new dataset sourced from Fishki.net, one of the popular entertainment and news portals in the Russian Internet, known for its diverse content including humor, interesting facts, and viral stories - ', 'raw': '🌐 Check out the new dataset sourced from Fishki.net, one of the popular entertainment and news portals in the Russian Internet, known for its diverse content including humor, interesting facts, and viral stories - '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nyuuzyou/fishkinet-posts'}, 'url': 'https://huggingface.co/datasets/nyuuzyou/fishkinet-posts', 'raw': 'https://huggingface.co/datasets/nyuuzyou/fishkinet-posts'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Dataset highlights:', 'raw': '📊 Dataset highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 369,180 posts', 'raw': '- 369,180 posts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Includes original posts with titles, content, images, and metadata', 'raw': '- Includes original posts with titles, content, images, and metadata'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Each entry contains URL, title, author, date, tags, content, and image URLs', 'raw': '- Each entry contains URL, title, author, date, tags, content, and image URLs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Primarily in Russian language', 'raw': '- Primarily in Russian language'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Covers a wide range of topics in entertainment, news, and social media content', 'raw': '- Covers a wide range of topics in entertainment, news, and social media content'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Spans nearly two decades of posts, likely from early 2000s to 2024', 'raw': '- Spans nearly two decades of posts, likely from early 2000s to 2024'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Dedicated to public domain under Creative Commons Zero (CC0) license', 'raw': '- Dedicated to public domain under Creative Commons Zero (CC0) license'}]","🌐 Check out the new dataset sourced from Fishki.net, one of the popular entertainment and news portals in the Russian Internet, known for its diverse content including humor, interesting facts, and viral stories - https://huggingface.co/datasets/nyuuzyou/fishkinet-posts. 
+ +📊 Dataset highlights: +- 369,180 posts +- Includes original posts with titles, content, images, and metadata +- Each entry contains URL, title, author, date, tags, content, and image URLs +- Primarily in Russian language +- Covers a wide range of topics in entertainment, news, and social media content +- Spans nearly two decades of posts, likely from early 2000s to 2024 +- Dedicated to public domain under Creative Commons Zero (CC0) license",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}, {'reaction': '🔥', 'users': ['kristaller486'], 'count': 1}]",2024-08-23 22:26:46,2024-08-23 22:26:46.262,[],/posts/nyuuzyou/515587584811622,904,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/608aabf24955d2bfc3cd99c6/T762Ut0Y-w0sZB2ynvfbJ.jpeg,245.0,Aritra Roy Gosthipaty,ariG23498,958062051696534,"[{'type': 'text', 'value': 'You can now use DoRA for your embedding layers!', 'raw': 'You can now use DoRA for your embedding layers!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PR: ', 'raw': 'PR: '}, {'type': 'link', 'href': 'https://github.com/huggingface/peft/pull/2006', 'raw': 'https://github.com/huggingface/peft/pull/2006'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have documented my journey of this specific PR in a blog post for everyone to read. The highlight of the PR was when the first author of DoRA reviewed my code.', 'raw': 'I have documented my journey of this specific PR in a blog post for everyone to read. The highlight of the PR was when the first author of DoRA reviewed my code.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog Post: ', 'raw': 'Blog Post: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/ariG23498/peft-dora', 'raw': 'https://huggingface.co/blog/ariG23498/peft-dora'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Huge thanks to ', 'raw': 'Huge thanks to '}, {'type': 'mention', 'user': 'BenjaminB', 'raw': '@BenjaminB'}, {'type': 'text', 'value': ' for all the help I needed.', 'raw': ' for all the help I needed.'}]","You can now use DoRA for your embedding layers! + +PR: https://github.com/huggingface/peft/pull/2006 + +I have documented my journey of this specific PR in a blog post for everyone to read. The highlight of the PR was when the first author of DoRA reviewed my code. + +Blog Post: https://huggingface.co/blog/ariG23498/peft-dora + +Huge thanks to @BenjaminB for all the help I needed.",[],"[{'_id': '62bf03d1e80cec527083cd66', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1656685953025-62bf03d1e80cec527083cd66.jpeg', 'fullname': 'Benjamin Bossan', 'name': 'BenjaminB', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 53}]","[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'John6666', 'OzzyGT', 'OxxoCodes', 'khanhduong241218', 'osanseviero', 'misba-code'], 'count': 7}]",2024-08-23 18:42:38,2024-08-23 18:42:38.809,[],/posts/ariG23498/958062051696534,1641,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png,5900.0,Joshua,Xenova,503093709017134,"[{'type': 'text', 'value': ""I can't believe this... Phi-3.5-mini (3.8B) running in-browser at ~90 tokens/second on WebGPU w/ Transformers.js and ONNX Runtime Web! 
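Following up on the DoRA-for-embedding-layers PR above, here is a hedged sketch of how it can be used through peft, assuming a release that includes that PR. The base checkpoint and the Llama-style module names ("embed_tokens", "q_proj", "v_proj") are illustrative assumptions, not prescribed by the PR.

```python
import torch
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

# Hypothetical base model; swap in any checkpoint with matching module names.
model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf", torch_dtype=torch.bfloat16
)

config = LoraConfig(
    r=16,
    lora_alpha=32,
    use_dora=True,  # weight-decomposed low-rank adaptation
    target_modules=["embed_tokens", "q_proj", "v_proj"],  # now includes the embedding layer
)
peft_model = get_peft_model(model, config)
peft_model.print_trainable_parameters()
```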
🤯 Since everything runs 100% locally, no messages are sent to a server — a huge win for privacy!"", 'raw': ""I can't believe this... Phi-3.5-mini (3.8B) running in-browser at ~90 tokens/second on WebGPU w/ Transformers.js and ONNX Runtime Web! 🤯 Since everything runs 100% locally, no messages are sent to a server — a huge win for privacy!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🤗 Demo: ', 'raw': '- 🤗 Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'webml-community/phi-3.5-webgpu'}, 'url': 'https://huggingface.co/spaces/webml-community/phi-3.5-webgpu', 'raw': 'https://huggingface.co/spaces/webml-community/phi-3.5-webgpu'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 🧑\u200d💻 Source code: ', 'raw': '- 🧑\u200d💻 Source code: '}, {'type': 'link', 'href': 'https://github.com/huggingface/transformers.js-examples/tree/main/phi-3.5-webgpu', 'raw': 'https://github.com/huggingface/transformers.js-examples/tree/main/phi-3.5-webgpu'}]","I can't believe this... Phi-3.5-mini (3.8B) running in-browser at ~90 tokens/second on WebGPU w/ Transformers.js and ONNX Runtime Web! 🤯 Since everything runs 100% locally, no messages are sent to a server — a huge win for privacy! +- 🤗 Demo: https://huggingface.co/spaces/webml-community/phi-3.5-webgpu +- 🧑‍💻 Source code: https://github.com/huggingface/transformers.js-examples/tree/main/phi-3.5-webgpu","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61b253b7ac5ecaae3d1efe0c/QUWyYmAATpcjIr9o41b8I.mp4'}]",[],"[{'reaction': '🔥', 'users': ['cschroeder', 'DmitryRyumin', 'John6666', '4rtemi5', 'DenyTranDFW', 'Rybens', 'YaTharThShaRma999', 'massimoavvisati', 'Bruhn', 'nguyenbh', 'DiamanteAmarelo', 'TheDrunkenSnail', 'not-lain', 'cfahlgren1', 'loubnabnl', 'Ramikan-BR', 'Omarito2412', 'revolunet', 'osanseviero', 'jweston', 'cahlen', 'captainspock', 'louisbrulenaudet', 'rmanoj', 'ethix', 'RojoXlon', 'jundialwan', 'ghosty-0', 'chrislhow', 'gianpaj', 'parksthecoder'], 'count': 31}, {'reaction': '🚀', 'users': ['jdspugh', 'DiamanteAmarelo', 'cfahlgren1', 'Ramikan-BR', 'GordonM', 'cahlen'], 'count': 6}, {'reaction': '😔', 'users': ['ZeroWw', 'cahlen'], 'count': 2}, {'reaction': '❤️', 'users': ['Ramikan-BR', 'cahlen'], 'count': 2}, {'reaction': '👀', 'users': ['Ramikan-BR', 'cahlen'], 'count': 2}, {'reaction': '🤯', 'users': ['ayouba'], 'count': 1}]",2024-08-23 15:14:41,2024-11-04 17:14:09.378,"[{'_id': '6646428e923866048f3e13e0', 'avatarUrl': '/avatars/54483699273ac58a4a6fe1fa4aab65fe.svg', 'fullname': 'Robert Sinclair', 'name': 'ZeroWw', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 100, 'isFollowing': False}, {'_id': '63119cc5af10c9efa1e9b620', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63119cc5af10c9efa1e9b620/RA-UgDNTPsF6j5uDnG3-N.jpeg', 'fullname': 'Akarshan Biswas', 'name': 'qnixsynapse', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12, 'isFollowing': False}, {'_id': '630b7396973a51d2115df881', 'avatarUrl': '/avatars/4b954dcff7739057e105c6e9019d7ca2.svg', 'fullname': 'Nicki Gataro', 'name': 'ceoofcapybaras', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '631ba7d75b628ccf3cea4add', 'avatarUrl': '/avatars/866bd569f610f81a33ecf0d2077213bd.svg', 'fullname': 'Ras', 'name': 'Ke09876', 'type': 'user', 'isPro': False, 
'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '63c20dea726f62e411fc7f64', 'avatarUrl': '/avatars/a996119e73a87724100e82babffe70ad.svg', 'fullname': 'Jon Schlinkert', 'name': 'jonschlinkert', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '652925f790f06dd8a8f04667', 'avatarUrl': '/avatars/c8891564e64052c3af1c07d11e7d74bf.svg', 'fullname': 'Nomi', 'name': 'Agroni', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '62a63e4a57845f11f3801bc2', 'avatarUrl': '/avatars/d9d2d692aef76c4131be177b693f109f.svg', 'fullname': 'Gianfranco Palumbo', 'name': 'gianpaj', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/Xenova/503093709017134,14074,,11 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/fTCV7VLY0eK4OXbwgIT2n.png,141.0,LagPixelLOL,v2ray,983464411136936,"[{'type': 'resource', 'resource': {'type': 'model', 'id': 'v2ray/deepgelbooru'}, 'url': 'https://huggingface.co/v2ray/deepgelbooru', 'raw': 'https://huggingface.co/v2ray/deepgelbooru'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A Danbooru tag image tagger, maybe better than WD14 on some images.', 'raw': 'A Danbooru tag image tagger, maybe better than WD14 on some images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Training code, inference code, dataset included.', 'raw': 'Training code, inference code, dataset included.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ':3', 'raw': ':3'}]","https://huggingface.co/v2ray/deepgelbooru +A Danbooru tag image tagger, maybe better than WD14 on some images. +Training code, inference code, dataset included. +:3",[],[],"[{'reaction': '😎', 'users': ['John6666', 'Etherll', 'nyuuzyou'], 'count': 3}, {'reaction': '🧠', 'users': ['John6666', 'den0620'], 'count': 2}]",2024-08-23 15:11:04,2024-08-24 15:53:47.520,"[{'_id': '635c2f5c3cb827d58118bb01', 'avatarUrl': '/avatars/0087f207c06a793c55ed0489ff793e70.svg', 'fullname': 'nicolo', 'name': 'nicolollo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '64aedccdd2373ae50cf5eaf0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/fTCV7VLY0eK4OXbwgIT2n.png', 'fullname': 'LagPixelLOL', 'name': 'v2ray', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 141, 'isFollowing': False}, {'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/v2ray/983464411136936,2027,,5 +/avatars/f32291df2054c1bb4a01889d1b41c0d5.svg,23.0,Christopher Schröder,cschroeder,322138284948005,"[{'type': 'text', 'value': '📄 ACL 2024: The Missing Papers', 'raw': '📄 ACL 2024: The Missing Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Apparently, some papers from ACL 2024 are still not listed in the ACL Anthology. 
While this issue will hopefully be fixed soon, we should give those papers additional spotlight.', 'raw': 'Apparently, some papers from ACL 2024 are still not listed in the ACL Anthology. While this issue will hopefully be fixed soon, we should give those papers additional spotlight.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Some of my favorites:', 'raw': 'Some of my favorites:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Dolma is an English corpus that encompasses 3 trillion tokens. Additionally, it is accompanied by an exceptional software package that considerably advances the state-of-the-art in preparing data for LLM pretraining. (Source: I am currently using Dolma.)', 'raw': '1. Dolma is an English corpus that encompasses 3 trillion tokens. Additionally, it is accompanied by an exceptional software package that considerably advances the state-of-the-art in preparing data for LLM pretraining. (Source: I am currently using Dolma.)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.00159'}, 'url': 'https://huggingface.co/papers/2402.00159', 'raw': 'https://huggingface.co/papers/2402.00159', 'label': 'Dolma: an Open Corpus of Three Trillion Tokens for Language Model\n Pretraining Research (2402.00159)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. In the paper ""Same Task, More Tokens: the Impact of Input Length on', 'raw': '2. In the paper ""Same Task, More Tokens: the Impact of Input Length on'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'the Reasoning Performance of Large Language Models"", the authors show how extending the context length impacts an LLM\'s reasoning performance. 
I asked myself a similar question a few months ago, and therefore this paper is highly interesting to me.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.14848'}, 'url': 'https://huggingface.co/papers/2402.14848', 'raw': 'https://huggingface.co/papers/2402.14848', 'label': 'Same Task, More Tokens: the Impact of Input Length on the Reasoning\n Performance of Large Language Models (2402.14848)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This was brought to my attention through a LinkedIn post by ', 'raw': 'This was brought to my attention through a LinkedIn post by '}, {'type': 'mention', 'user': 'ShayeghB', 'raw': '@ShayeghB'}, {'type': 'text', 'value': ', who is also affected:', 'raw': ', who is also affected:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.00143'}, 'url': 'https://huggingface.co/papers/2403.00143', 'raw': 'https://huggingface.co/papers/2403.00143', 'label': 'Ensemble-Based Unsupervised Discontinuous Constituency Parsing by Tree\n Averaging (2403.00143)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'View all the missing papers here: ', 'raw': 'View all the missing papers here: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://theshayegh.github.io/ACL2024MissingPapers/', 'raw': 'https://theshayegh.github.io/ACL2024MissingPapers/'}]","📄 ACL 2024: The Missing Papers + +Apparently, some papers from ACL 2024 are still not listed in the ACL Anthology. While this issue will hopefully be fixed soon, we should give those papers additional spotlight. + +Some of my favorites: + +1. Dolma is an English corpus that encompasses 3 trillion tokens. Additionally, it is accompanied by an exceptional software package that considerably advances the state-of-the-art in preparing data for LLM pretraining. (Source: I am currently using Dolma.) +https://huggingface.co/papers/2402.00159 + +2. In the paper ""Same Task, More Tokens: the Impact of Input Length on +the Reasoning Performance of Large Language Models"", the authors show how extending the context length impacts an LLM's reasoning performance. I asked myself a similar question a few months ago, and therefore this paper is highly interesting to me. +https://huggingface.co/papers/2402.14848 + +This was brought to my attention through a LinkedIn post by @ShayeghB, who is also affected: +https://huggingface.co/papers/2403.00143 + +View all the missing papers here: +https://theshayegh.github.io/ACL2024MissingPapers/",[],"[{'_id': '63daa4de0cc3bc12bc05cddc', 'avatarUrl': '/avatars/4bde764fb817c9f11d2ab145806d0ff5.svg', 'fullname': 'Behzad Shayegh', 'name': 'ShayeghB', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-08-23 15:10:12,2024-08-23 21:09:55.493,[],/posts/cschroeder/322138284948005,358,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/BJLU1-6HOueSdNHrPoCQn.jpeg,,ahmadalfakeh,ahmadalfakeh,214845461391146,"[{'type': 'text', 'value': 'how to initialize zerogpu on hf space?', 'raw': 'how to initialize zerogpu on hf space?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","how to initialize zerogpu on hf space? 
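For anyone landing on this question: a common pattern (not the only one) is to set the Space hardware to ZeroGPU and decorate the GPU-bound function with `@spaces.GPU` from the `spaces` package, which attaches a GPU only for the duration of the call. A minimal sketch with a placeholder `generate` function:

```python
import gradio as gr
import spaces  # available automatically on ZeroGPU Spaces
import torch

@spaces.GPU  # or @spaces.GPU(duration=120) for longer-running calls
def generate(prompt: str) -> str:
    # Inside the decorated call, CUDA should be available on ZeroGPU hardware.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"ran on {device}: {prompt}"

gr.Interface(fn=generate, inputs="text", outputs="text").launch()
```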
+ + +",[],[],"[{'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-08-23 14:07:19,2024-08-23 16:12:24.857,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}, {'_id': '66b9534229602640270d8da0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/BJLU1-6HOueSdNHrPoCQn.jpeg', 'fullname': 'ahmadalfakeh', 'name': 'ahmadalfakeh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/ahmadalfakeh/214845461391146,307,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64be41c330a1f0f0f0a1e0eb/8W-z1xMGnQGQ2M4LXnZfy.jpeg,94.0,VORTEX,Abhaykoul,972313929139427,"[{'type': 'text', 'value': 'Introducing HelpingAI2-9B, an emotionally intelligent LLM. ', 'raw': 'Introducing HelpingAI2-9B, an emotionally intelligent LLM. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model Link : ', 'raw': 'Model Link : '}, {'type': 'link', 'href': 'https://huggingface.co/OEvortex/HelpingAI2-9B', 'raw': 'https://huggingface.co/OEvortex/HelpingAI2-9B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo Link: ', 'raw': 'Demo Link: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Abhaykoul/HelpingAI2'}, 'url': 'https://huggingface.co/spaces/Abhaykoul/HelpingAI2', 'raw': 'https://huggingface.co/spaces/Abhaykoul/HelpingAI2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This model is part of the innovative HelpingAI series and it stands out for its ability to engage users with emotional understanding.', 'raw': 'This model is part of the innovative HelpingAI series and it stands out for its ability to engage users with emotional understanding.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Features:', 'raw': 'Key Features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '-----------------', 'raw': '-----------------'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* It gets 95.89 score on EQ Bench greather than all top notch LLMs, reflecting advanced emotional recognition.', 'raw': '* It gets 95.89 score on EQ Bench greather than all top notch LLMs, reflecting advanced emotional recognition.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* It gives responses in empathetic and supportive manner.', 'raw': '* It gives responses in empathetic and supportive manner.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Must try our demo: ', 'raw': 'Must try our demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Abhaykoul/HelpingAI2'}, 'url': 'https://huggingface.co/spaces/Abhaykoul/HelpingAI2', 'raw': 'https://huggingface.co/spaces/Abhaykoul/HelpingAI2'}]","Introducing HelpingAI2-9B, an emotionally intelligent LLM. 
+ +Model Link: https://huggingface.co/OEvortex/HelpingAI2-9B +Demo Link: https://huggingface.co/spaces/Abhaykoul/HelpingAI2 + +This model is part of the innovative HelpingAI series and it stands out for its ability to engage users with emotional understanding. + +Key Features: +----------------- + +* It gets a 95.89 score on EQ Bench, greater than all top-notch LLMs, reflecting advanced emotional recognition. +* It gives responses in an empathetic and supportive manner. + +Must try our demo: https://huggingface.co/spaces/Abhaykoul/HelpingAI2",[],[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'Ken0965', 'VictorSanh', 'thebryanalvarado', 'KingNish', 'ha1772007', 'GPT007', 'victor'], 'count': 8}, {'reaction': '❤️', 'users': ['ijohn07', 'KingNish', 'victor'], 'count': 3}, {'reaction': '👀', 'users': ['John6666'], 'count': 1}]",2024-08-16 11:38:23,2024-12-02 23:47:13.458,"[{'_id': '669ec4832114776bafe86724', 'avatarUrl': '/avatars/33e31a6ec1ae8800373de4d832a29627.svg', 'fullname': 'PsychoPrincess', 'name': 'PsychoPrincess', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Abhaykoul/972313929139427,3278,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/665074ded3e886a93d713e73/tTAkhsz2J-uEQAfSsRvAf.jpeg,29.0,hlky,hlky,343314951558641,"[{'type': 'text', 'value': ""Announcing another BIG data drop! This time it's ~275M images from Flickr "", 'raw': ""Announcing another BIG data drop! This time it's ~275M images from Flickr ""}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'bigdata-pw/Flickr'}, 'url': 'https://huggingface.co/datasets/bigdata-pw/Flickr', 'raw': 'https://huggingface.co/datasets/bigdata-pw/Flickr'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Data acquisition for this project is still in progress, get ready for an update soon:tm: ', 'raw': 'Data acquisition for this project is still in progress, get ready for an update soon:tm: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In case you missed them: other BIG data drops include Diffusion1B ', 'raw': 'In case you missed them: other BIG data drops include Diffusion1B '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'bigdata-pw/Diffusion1B'}, 'url': 'https://huggingface.co/datasets/bigdata-pw/Diffusion1B', 'raw': 'https://huggingface.co/datasets/bigdata-pw/Diffusion1B'}, {'type': 'text', 'value': ' - ~1.23B images and generation parameters from a variety of diffusion models. And if you fancy practicing diffusion model training, check out Dataception ', 'raw': ' - ~1.23B images and generation parameters from a variety of diffusion models. And if you fancy practicing diffusion model training, check out Dataception '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'bigdata-pw/Dataception'}, 'url': 'https://huggingface.co/datasets/bigdata-pw/Dataception', 'raw': 'https://huggingface.co/datasets/bigdata-pw/Dataception'}, {'type': 'text', 'value': ' - a dataset of over 5000 datasets in WebDataset format!', 'raw': ' - a dataset of over 5000 datasets in WebDataset format!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Requests are always welcome, so reach out if there's a dataset you'd like to see!"", 'raw': ""Requests are always 
welcome, so reach out if there's a dataset you'd like to see!""}]","Announcing another BIG data drop! This time it's ~275M images from Flickr https://huggingface.co/datasets/bigdata-pw/Flickr + +Data acquisition for this project is still in progress, get ready for an update soon:tm: + +In case you missed them: other BIG data drops include Diffusion1B https://huggingface.co/datasets/bigdata-pw/Diffusion1B - ~1.23B images and generation parameters from a variety of diffusion models. And if you fancy practicing diffusion model training, check out Dataception https://huggingface.co/datasets/bigdata-pw/Dataception - a dataset of over 5000 datasets in WebDataset format! + +Requests are always welcome, so reach out if there's a dataset you'd like to see!",[],[],"[{'reaction': '👍', 'users': ['ajibawa-2023', 'John6666', 'Mdubbya', 'dashfunnydashdash', 'ZeroWw'], 'count': 5}]",2024-08-15 18:55:29,2024-08-16 09:17:26.971,"[{'_id': '64d323b950310d7ad397fce7', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64d323b950310d7ad397fce7/KKGL1Y5ZeQSBTqaeKQCaY.png', 'fullname': 'Ryan Miller', 'name': 'Meroar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/hlky/343314951558641,1934,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,576586250383339,"[{'type': 'text', 'value': '✨ Feeling thankful... ', 'raw': '✨ Feeling thankful... '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🇮🇳 15th August, 2024; on India's 78th Independence Day "", 'raw': ""🇮🇳 15th August, 2024; on India's 78th Independence Day ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎉 Crossed 100 followers on Hugging Face', 'raw': '🎉 Crossed 100 followers on Hugging Face'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏆 Got LinkedIn Top Voice', 'raw': '🏆 Got LinkedIn Top Voice'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 AI has never been more exciting and I am here for it', 'raw': '🤖 AI has never been more exciting and I am here for it'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👀 ', 'raw': '👀 '}, {'type': 'mention', 'user': 'clem', 'raw': '@clem'}, {'type': 'text', 'value': ' Can I be a Hugging Face fellow now? ', 'raw': ' Can I be a Hugging Face fellow now? '}]","✨ Feeling thankful... + +🇮🇳 15th August, 2024; on India's 78th Independence Day + +🎉 Crossed 100 followers on Hugging Face + +🏆 Got LinkedIn Top Voice + +🤖 AI has never been more exciting and I am here for it + +👀 @clem Can I be a Hugging Face fellow now? 
","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/-10pNrJFQQKsNlXf5_kqs.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/BVppofWzAZ1xB8jYIYhZB.jpeg'}]","[{'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489}]","[{'reaction': '🚀', 'users': ['wonderboy', 'DavidAU', 'mmx31', 'KingNish', 'YaTharThShaRma999'], 'count': 5}]",2024-08-15 18:29:10,2024-08-15 18:29:10.232,[],/posts/singhsidhukuldeep/576586250383339,1723,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6435718aaaef013d1aec3b8b/XKf-8MA47tjVAM6SCX0MP.jpeg,7068.0,Bartowski,bartowski,608656345183499,"[{'type': 'text', 'value': 'As some of you know, I try to convert models to either fp32 or bf16 depending on theirs size before doing imatrix and quantization', 'raw': 'As some of you know, I try to convert models to either fp32 or bf16 depending on theirs size before doing imatrix and quantization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Today I decided to see if that matters, and the results have me.. for lack of a better word, perplexed', 'raw': 'Today I decided to see if that matters, and the results have me.. for lack of a better word, perplexed'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My setup:', 'raw': 'My setup:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mistral Nemo Instruct 2407', 'raw': 'Mistral Nemo Instruct 2407'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- convert to FP32, calculate imatrix, quantize to Q8_0 and Q4_K_M', 'raw': '- convert to FP32, calculate imatrix, quantize to Q8_0 and Q4_K_M'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- convert to FP16, calculate imatrix, quantize to Q8_0 and Q4_K_M', 'raw': '- convert to FP16, calculate imatrix, quantize to Q8_0 and Q4_K_M'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I calculated the kld base from the FP32 model:', 'raw': 'I calculated the kld base from the FP32 model:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'inline_code', 'code': './llama-perplexity -m /models/Mistral-Nemo-Instruct-2407-f32.gguf -f /training_data/wikitext-2-raw/wiki.test.raw --kl-divergence-base /training_data/mistral-nemo-f32.kld -ngl 35 -fa -sm row', 'raw': '`./llama-perplexity -m /models/Mistral-Nemo-Instruct-2407-f32.gguf -f /training_data/wikitext-2-raw/wiki.test.raw --kl-divergence-base /training_data/mistral-nemo-f32.kld -ngl 35 -fa -sm row`'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'then calculated the divergence itself for each like so:', 'raw': 'then calculated the divergence itself for each like so:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'inline_code', 'code': './llama-perplexity -m /models/Mistral-Nemo-Instruct-2407-Q8_0.gguf -f /training_data/wikitext-2-raw/wiki.test.raw --kl-divergence-base /training_data/mistral-nemo-f32.kld --kl-divergence -ngl 50 -fa -sm row', 'raw': '`./llama-perplexity -m /models/Mistral-Nemo-Instruct-2407-Q8_0.gguf -f /training_data/wikitext-2-raw/wiki.test.raw 
--kl-divergence-base /training_data/mistral-nemo-f32.kld --kl-divergence -ngl 50 -fa -sm row`'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Q4_K_M from fp16 and fp32 were similar, trading blows across statistics, odd since I expected fp32 to be strictly better but it's not"", 'raw': ""Q4_K_M from fp16 and fp32 were similar, trading blows across statistics, odd since I expected fp32 to be strictly better but it's not""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Q8_0 is where things get weird. Despite each file being a slightly different size, and the sha256sum of course being different, they each get *completely identical* scores, down to 6 decimal places of precision on the statistics.', 'raw': 'Q8_0 is where things get weird. Despite each file being a slightly different size, and the sha256sum of course being different, they each get *completely identical* scores, down to 6 decimal places of precision on the statistics.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""How is this possible? Is there something I don't understand about llama.cpp that makes it always convert to fp16 before it does quantization? Am I wasting time using FP32/BF16??"", 'raw': ""How is this possible? Is there something I don't understand about llama.cpp that makes it always convert to fp16 before it does quantization? Am I wasting time using FP32/BF16??""}]","As some of you know, I try to convert models to either fp32 or bf16 depending on their size before doing imatrix and quantization + +Today I decided to see if that matters, and the results have me... for lack of a better word, perplexed + +My setup: + +Mistral Nemo Instruct 2407 +- convert to FP32, calculate imatrix, quantize to Q8_0 and Q4_K_M +- convert to FP16, calculate imatrix, quantize to Q8_0 and Q4_K_M + +I calculated the kld base from the FP32 model: +`./llama-perplexity -m /models/Mistral-Nemo-Instruct-2407-f32.gguf -f /training_data/wikitext-2-raw/wiki.test.raw --kl-divergence-base /training_data/mistral-nemo-f32.kld -ngl 35 -fa -sm row` + +then calculated the divergence itself for each like so: +`./llama-perplexity -m /models/Mistral-Nemo-Instruct-2407-Q8_0.gguf -f /training_data/wikitext-2-raw/wiki.test.raw --kl-divergence-base /training_data/mistral-nemo-f32.kld --kl-divergence -ngl 50 -fa -sm row` + +Q4_K_M from fp16 and fp32 were similar, trading blows across statistics, odd since I expected fp32 to be strictly better but it's not + +Q8_0 is where things get weird. Despite each file being a slightly different size, and the sha256sum of course being different, they each get *completely identical* scores, down to 6 decimal places of precision on the statistics. + +How is this possible? Is there something I don't understand about llama.cpp that makes it always convert to fp16 before it does quantization? 
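For context on the statistic being compared here: `--kl-divergence` reports the KL divergence between the reference model's and the test model's next-token distributions, so two quants that produce bit-identical logits on the eval text will score identically. A toy NumPy illustration of the quantity itself (not llama.cpp's implementation):

```python
import numpy as np

def softmax(logits: np.ndarray) -> np.ndarray:
    # Numerically stable softmax over the vocabulary axis.
    e = np.exp(logits - logits.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

rng = np.random.default_rng(0)
ref_logits = rng.normal(size=(4, 32000))                                   # stand-in for the fp32 base
quant_logits = ref_logits + rng.normal(scale=0.01, size=ref_logits.shape)  # stand-in for a quant

p, q = softmax(ref_logits), softmax(quant_logits)
mean_kld = (p * (np.log(p) - np.log(q))).sum(axis=-1).mean()
print(f"mean KLD: {mean_kld:.6f}")  # identical logits would give exactly 0.0
```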
Am I wasting time using FP32/BF16??",[],[],"[{'reaction': '👍', 'users': ['Rybens', 'John6666', 'Nelathan', 'YaTharThShaRma999', 'MarinaraSpaghetti', 'DeathGodlike', 'osanseviero', 'julien-c', 'pcuenq', 'mlabonne', 'Joseph717171'], 'count': 11}, {'reaction': '🔥', 'users': ['nazimali', 'osanseviero', 'julien-c', 'Joseph717171'], 'count': 4}, {'reaction': '❤️', 'users': ['ijohn07', 'Joseph717171'], 'count': 2}]",2024-08-15 14:21:20,2024-08-17 07:15:35.631,"[{'_id': '6435718aaaef013d1aec3b8b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6435718aaaef013d1aec3b8b/XKf-8MA47tjVAM6SCX0MP.jpeg', 'fullname': 'Bartowski', 'name': 'bartowski', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7068, 'isFollowing': False}, {'_id': '63036c501dd5d3c62480dd14', 'avatarUrl': '/avatars/19801a674af5af87bf036aed6a69cce7.svg', 'fullname': 'Johannes Gäßler', 'name': 'JohannesGaessler', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 17, 'isFollowing': False}, {'_id': '65ea44635b64331c067d3751', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65ea44635b64331c067d3751/yCim-7c3tm67o5wWP_6cE.jpeg', 'fullname': 'David Belton', 'name': 'DavidAU', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1367, 'isFollowing': False}, {'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}, {'_id': '65cccccefb8ab7fcc2c6424c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65cccccefb8ab7fcc2c6424c/0dlk5hmzNhTWr8j9E1DXP.jpeg', 'fullname': 'Rodri Mora', 'name': 'bullerwins', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 114, 'isFollowing': False}, {'_id': '65a5ad3c0b5704678a8612b9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65a5ad3c0b5704678a8612b9/Fh6TDWyf9EGt6U2_G-v4Q.png', 'fullname': 'AItomek ;P', 'name': 'altomek', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23, 'isFollowing': False}]",/posts/bartowski/608656345183499,6375,,17 +https://cdn-avatars.huggingface.co/v1/production/uploads/6380ebb8471a4550ff255c62/ZWdgPmHQBgvFNHnT65V6-.jpeg,85.0,Batuhan,isidentical,574566841162028,"[{'type': 'resource', 'resource': {'type': 'model', 'id': 'fal/AuraFlow-v0.3'}, 'url': 'https://huggingface.co/fal/AuraFlow-v0.3', 'raw': 'https://huggingface.co/fal/AuraFlow-v0.3'}, {'type': 'text', 'value': ' is now here with support for different aspect resolutions (w/h up to 1536px!) and much nicer aesthetics! Make sure to install the latest diffusers to get support for it.', 'raw': ' is now here with support for different aspect resolutions (w/h up to 1536px!) and much nicer aesthetics! Make sure to install the latest diffusers to get support for it.'}]",https://huggingface.co/fal/AuraFlow-v0.3 is now here with support for different aspect resolutions (w/h up to 1536px!) and much nicer aesthetics! 
Make sure to install the latest diffusers to get support for it.,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6380ebb8471a4550ff255c62/KB8xrSFixVnVay9yA32ql.png'}]",[],"[{'reaction': '🔥', 'users': ['gokaygokay', 'John6666', 'YaTharThShaRma999', 'NHLOCAL', 'Sri-Vigneshwar-DJ', 'osanseviero', 'isidentical', 'adamelliotfields'], 'count': 8}, {'reaction': '❤️', 'users': ['prithivMLmods', 'osanseviero', 'Fetah06', 'isidentical'], 'count': 4}]",2024-08-15 14:15:17,2024-08-15 14:15:17.634,[],/posts/isidentical/574566841162028,1898,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,541848107798204,"[{'type': 'text', 'value': ""𝗭𝗲𝗿𝗼-𝗺𝗮𝘁𝗵 𝗶𝗻𝘁𝗿𝗼 𝘁𝗼 𝗔𝗜 𝗵𝗶𝘀𝘁𝗼𝗿𝘆: 𝗳𝗿𝗼𝗺 𝘁𝗵𝗲 𝟭𝟵𝟱𝟬𝘀 𝘁𝗼 𝘁𝗼𝗱𝗮𝘆'𝘀 𝗟𝗟𝗠𝘀 📖"", 'raw': ""𝗭𝗲𝗿𝗼-𝗺𝗮𝘁𝗵 𝗶𝗻𝘁𝗿𝗼 𝘁𝗼 𝗔𝗜 𝗵𝗶𝘀𝘁𝗼𝗿𝘆: 𝗳𝗿𝗼𝗺 𝘁𝗵𝗲 𝟭𝟵𝟱𝟬𝘀 𝘁𝗼 𝘁𝗼𝗱𝗮𝘆'𝘀 𝗟𝗟𝗠𝘀 📖""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I wanted to structure my thinking about LLMs by going through their history since the 50s. This history is captivating, with the opposition between Connexionists (Rosenblatt, LeCun) and Symbolists, the first victories of ""deep"" neural networks, the revolution of Attention...', 'raw': 'I wanted to structure my thinking about LLMs by going through their history since the 50s. This history is captivating, with the opposition between Connexionists (Rosenblatt, LeCun) and Symbolists, the first victories of ""deep"" neural networks, the revolution of Attention...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So I might have gone a bit too far! 😅', 'raw': 'So I might have gone a bit too far! 😅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""📝 I've made a long post summarizing the main stages of building LLMs: neural networks, optimization, backpropagation, attention layers..."", 'raw': ""📝 I've made a long post summarizing the main stages of building LLMs: neural networks, optimization, backpropagation, attention layers...""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""✅ And I've made sure to keep it 100% horrible-latex-math-free: the technical stuff is conveyed in graphs only, so it should be accessible to really anyone, even your grandfather (I'm sending it to mine right now)."", 'raw': ""✅ And I've made sure to keep it 100% horrible-latex-math-free: the technical stuff is conveyed in graphs only, so it should be accessible to really anyone, even your grandfather (I'm sending it to mine right now).""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read it here in english 👉 ', 'raw': 'Read it here in english 👉 '}, {'type': 'link', 'href': 'https://aymeric-roucher.github.io/brief-history-of-ai/', 'raw': 'https://aymeric-roucher.github.io/brief-history-of-ai/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Pour le post en français 👉 ', 'raw': 'Pour le post en français 👉 '}, {'type': 'link', 'href': 'https://aymeric-roucher.github.io/breve-histoire-de-l-ia/', 'raw': 'https://aymeric-roucher.github.io/breve-histoire-de-l-ia/'}]","𝗭𝗲𝗿𝗼-𝗺𝗮𝘁𝗵 𝗶𝗻𝘁𝗿𝗼 𝘁𝗼 𝗔𝗜 𝗵𝗶𝘀𝘁𝗼𝗿𝘆: 𝗳𝗿𝗼𝗺 𝘁𝗵𝗲 𝟭𝟵𝟱𝟬𝘀 𝘁𝗼 𝘁𝗼𝗱𝗮𝘆'𝘀 𝗟𝗟𝗠𝘀 📖 + +I wanted to structure my thinking about LLMs by going through their history since the 50s. 
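Referring back to the AuraFlow v0.3 release above, here is a minimal sketch using a diffusers version recent enough to ship AuraFlowPipeline. The prompt and the 1536x768 size are example values within the advertised range, not settings from the announcement:

```python
import torch
from diffusers import AuraFlowPipeline

pipe = AuraFlowPipeline.from_pretrained(
    "fal/AuraFlow-v0.3", torch_dtype=torch.float16
).to("cuda")

image = pipe(
    prompt="a lighthouse on a cliff at dusk, watercolor",
    width=1536,   # non-square sizes up to 1536px, per the announcement
    height=768,
    num_inference_steps=50,
    guidance_scale=3.5,
).images[0]
image.save("auraflow_v03.png")
```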
This history is captivating, with the opposition between Connexionists (Rosenblatt, LeCun) and Symbolists, the first victories of ""deep"" neural networks, the revolution of Attention... + +So I might have gone a bit too far! 😅 + +📝 I've made a long post summarizing the main stages of building LLMs: neural networks, optimization, backpropagation, attention layers... + +✅ And I've made sure to keep it 100% horrible-latex-math-free: the technical stuff is conveyed in graphs only, so it should be accessible to really anyone, even your grandfather (I'm sending it to mine right now). + +Read it here in english 👉 https://aymeric-roucher.github.io/brief-history-of-ai/ +Pour le post en français 👉 https://aymeric-roucher.github.io/breve-histoire-de-l-ia/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/dgpq4jC9mRU3ogxDzJhB7.png'}]",[],"[{'reaction': '🤝', 'users': ['megoyaw3', 'Dem1g0d'], 'count': 2}]",2024-08-15 13:59:58,2024-08-15 13:59:58.470,[],/posts/m-ric/541848107798204,995,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,420890568934318,"[{'type': 'text', 'value': 'Some personal and professional news ✨', 'raw': 'Some personal and professional news ✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm writing a book on ML metrics."", 'raw': ""I'm writing a book on ML metrics.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Together with Wojtek Kuberski, we’re creating the missing piece of every ML university program and online course: a book solely dedicated to Machine Learning metrics!', 'raw': 'Together with Wojtek Kuberski, we’re creating the missing piece of every ML university program and online course: a book solely dedicated to Machine Learning metrics!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The book will cover the following types of metrics:', 'raw': 'The book will cover the following types of metrics:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Regression', 'raw': '• Regression'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Classification', 'raw': '• Classification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Clustering', 'raw': '• Clustering'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Ranking', 'raw': '• Ranking'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Vision', 'raw': '• Vision'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Text', 'raw': '• Text'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• GenAI', 'raw': '• GenAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Bias and Fairness', 'raw': '• Bias and Fairness'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 check out the book: ', 'raw': '👉 check out the book: '}, {'type': 'link', 'href': 'https://www.nannyml.com/metrics', 'raw': 'https://www.nannyml.com/metrics'}]","Some personal and professional news ✨ + +I'm writing a book on ML metrics. + +Together with Wojtek Kuberski, we’re creating the missing piece of every ML university program and online course: a book solely dedicated to Machine Learning metrics! 
+ +The book will cover the following types of metrics: +• Regression +• Classification +• Clustering +• Ranking +• Vision +• Text +• GenAI +• Bias and Fairness + +👉 check out the book: https://www.nannyml.com/metrics","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/CGQRzUfEItSfDGDpEN7Wl.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['louisbrulenaudet'], 'count': 1}]",2024-08-15 13:04:54,2024-08-15 14:41:01.073,"[{'_id': '624b392601cc1fd0a161b12e', 'avatarUrl': '/avatars/dcd27ab92a41245d7b96b84ed63ce3a1.svg', 'fullname': 'Mark Moyou', 'name': 'mmoy', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '629a173153a72d997d3f57d0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg', 'fullname': 'Santiago Viquez', 'name': 'santiviquez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 86, 'isFollowing': False}]",/posts/santiviquez/420890568934318,467,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64257c616d0f0f5f1dc6aa2a/WNXC2PcyDn-jt9ZY5Rbka.jpeg,3289.0,Joffrey THOMAS,Jofthomas,525100587174350,"[{'type': 'text', 'value': 'Everchanging Quest is out!', 'raw': 'Everchanging Quest is out!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It is an LLM-controlled Rogue-Like in which the LLM gets a markdown representation of the map, and should generate a JSON with the objective to fulfill on the map as well as the necessary objects and their placements.', 'raw': 'It is an LLM-controlled Rogue-Like in which the LLM gets a markdown representation of the map, and should generate a JSON with the objective to fulfill on the map as well as the necessary objects and their placements.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Come test it on the space:', 'raw': 'Come test it on the space:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Jofthomas/Everchanging-Quest'}, 'url': 'https://huggingface.co/spaces/Jofthomas/Everchanging-Quest', 'raw': 'https://huggingface.co/spaces/Jofthomas/Everchanging-Quest'}, {'type': 'new_line', 'raw': '\n'}]","Everchanging Quest is out! + +It is an LLM-controlled Rogue-Like in which the LLM gets a markdown representation of the map, and should generate a JSON with the objective to fulfill on the map as well as the necessary objects and their placements. 
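To illustrate the map-to-JSON loop described above, here is a hypothetical example of the kind of objective an LLM might emit. The actual schema used by Everchanging Quest is not shown in the post, so every field name below is invented:

```python
# Invented schema, purely for illustration of the described protocol.
quest = {
    "objective": "Recover the ancient amulet and reach the exit",
    "objects": [
        {"name": "amulet", "tile": [12, 4]},
        {"name": "rusty_key", "tile": [7, 1]},
        {"name": "locked_door", "tile": [3, 9], "requires": "rusty_key"},
    ],
}
```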
+ +Come test it on the space : +https://huggingface.co/spaces/Jofthomas/Everchanging-Quest +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64257c616d0f0f5f1dc6aa2a/_DesbR4fO9A1h5S_RzAh1.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64257c616d0f0f5f1dc6aa2a/fbSL4aKOmeG_pD5KBdWpS.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64257c616d0f0f5f1dc6aa2a/aP3aMmL-fHhPIXS2NtZGC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64257c616d0f0f5f1dc6aa2a/LUhMvcloPrZiXVHQGzy8a.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64257c616d0f0f5f1dc6aa2a/fbew9rhUOKyImxpBMmQvO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64257c616d0f0f5f1dc6aa2a/379j-_xBx6iFaGIQzySDw.png'}]",[],"[{'reaction': '🔥', 'users': ['fractalego', 'mrfakename', 'Svngoku', 'Aurelien-Morgan', 'mmhamdy', 'boapps', 'osanseviero', 'jsgreenawalt', 'Joseph717171', 'ChuckMcSneed', 'victor', 'adamelliotfields', 'Lacaz64', 'julien-rodriguez', 'philschmid', 'philipp-zettl', 'AndrewNa', 'azuremous', 'haikunns', 'yomir', 'comarproject', 'hitchhiker3010', 'palsp', 'QuickSilver007'], 'count': 24}, {'reaction': '👀', 'users': ['John6666', 'mmoy', 'Svngoku', 'osanseviero', 'jsgreenawalt', 'Joseph717171', 'ChuckMcSneed', 'Lacaz64', 'philschmid', 'AndrewNa', 'yomir'], 'count': 11}, {'reaction': '👍', 'users': ['yomir', 'MrDevolver', 'kksinha', 'eugpal4'], 'count': 4}, {'reaction': '🧠', 'users': ['yomir'], 'count': 1}, {'reaction': '❤️', 'users': ['yomir'], 'count': 1}, {'reaction': '🚀', 'users': ['yomir'], 'count': 1}, {'reaction': '🤯', 'users': ['yomir'], 'count': 1}, {'reaction': '🤝', 'users': ['yomir'], 'count': 1}]",2024-08-15 09:32:15,2024-09-04 20:40:33.377,"[{'_id': '66b7604386637631f8a41673', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/0aT1gEfBn3pjTwov4akJE.png', 'fullname': 'MIR', 'name': 'yomir', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '64257c616d0f0f5f1dc6aa2a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64257c616d0f0f5f1dc6aa2a/WNXC2PcyDn-jt9ZY5Rbka.jpeg', 'fullname': 'Joffrey THOMAS', 'name': 'Jofthomas', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3289, 'isFollowing': False}]",/posts/Jofthomas/525100587174350,7629,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e2f1cb4dbf9514fb475b48/0EwhfSfMCy8P2e7nJWaOO.jpeg,41.0,Rico Ardiansyah,Blane187,240020635038154,"[{'type': 'text', 'value': ""ehm, so today i've finished my new project"", 'raw': ""ehm, so today i've finished my new project""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Blane187/animalese-py'}, 'url': 'https://huggingface.co/spaces/Blane187/animalese-py', 'raw': 'https://huggingface.co/spaces/Blane187/animalese-py'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'or you can make your voice to animalese with it:', 'raw': 'or you can make your voice to animalese with it:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Blane187/animalese_RVC'}, 'url': 
'https://huggingface.co/spaces/Blane187/animalese_RVC', 'raw': 'https://huggingface.co/spaces/Blane187/animalese_RVC'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""i'm just bored, so i make the project, lol"", 'raw': ""i'm just bored, so i make the project, lol""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","ehm, so today i've finished my new project + +https://huggingface.co/spaces/Blane187/animalese-py + +or you can make your voice to animalese with it: +https://huggingface.co/spaces/Blane187/animalese_RVC + +i'm just bored, so i make the project, lol + +",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2024-08-15 09:06:34,2024-08-15 09:06:34.270,[],/posts/Blane187/240020635038154,1212,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677141720071-634ff41ff32062e9eb7b06a3.jpeg,542.0,David Berenstein,davidberenstein1957,575318853216493,"[{'type': 'text', 'value': '📣 Introducing Dataset Viber: your chill repo for data collection, annotation and vibe checks! 🎉', 'raw': '📣 Introducing Dataset Viber: your chill repo for data collection, annotation and vibe checks! 🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've cooked up Dataset Viber, a set of cool tools designed to make data preparation for AI models easier, more approachable and enjoyable for standalone AI engineers and enthusiasts."", 'raw': ""I've cooked up Dataset Viber, a set of cool tools designed to make data preparation for AI models easier, more approachable and enjoyable for standalone AI engineers and enthusiasts.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔧 What Dataset Viber offers:', 'raw': '🔧 What Dataset Viber offers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- CollectorInterface: Lazily collect model interaction data without human annotation', 'raw': '- CollectorInterface: Lazily collect model interaction data without human annotation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- AnnotatorInterface: Annotate your data with models in the loop', 'raw': '- AnnotatorInterface: Annotate your data with models in the loop'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- BulkInterface: Explore data distribution and annotate in bulk', 'raw': '- BulkInterface: Explore data distribution and annotate in bulk'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Embedder: Efficiently embed data with ONNX-optimized speeds', 'raw': '- Embedder: Efficiently embed data with ONNX-optimized speeds'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Key features:', 'raw': '🎯 Key features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Supports various tasks for text, chat, and image modalities', 'raw': '- Supports various tasks for text, chat, and image modalities'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Runs in .ipynb notebooks', 'raw': '- Runs in .ipynb notebooks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Logs data to local CSV or directly to Hugging Face Hub', 'raw': '- Logs data to local CSV or directly to Hugging Face Hub'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Easy to install via pip: ', 'raw': '- Easy to install via pip: '}, 
{'type': 'inline_code', 'code': 'pip install dataset-viber', 'raw': '`pip install dataset-viber`'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's not designed for team collaboration or production use, but rather as a fun and efficient toolkit for individual projects."", 'raw': ""It's not designed for team collaboration or production use, but rather as a fun and efficient toolkit for individual projects.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Want to give it a try? Check out the repository link ', 'raw': 'Want to give it a try? Check out the repository link '}, {'type': 'link', 'href': 'https://github.com/davidberenstein1957/dataset-viber/', 'raw': 'https://github.com/davidberenstein1957/dataset-viber/'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm excited to hear your feedback and learn how you vibe with your data. Feel free to open an issue or reach out if you have any questions or suggestions!"", 'raw': ""I'm excited to hear your feedback and learn how you vibe with your data. Feel free to open an issue or reach out if you have any questions or suggestions!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Some shoutouts:', 'raw': 'Some shoutouts:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Gradio for the amazing backbone', 'raw': '- Gradio for the amazing backbone'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Daniel van Strien for some initial presentations I did on vibe checks', 'raw': '- Daniel van Strien for some initial presentations I did on vibe checks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Emily Omier for the workshop on structuring GitHub repo READMEs', 'raw': '- Emily Omier for the workshop on structuring GitHub repo READMEs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Hamel Husain for continually mentioning that people should look at their data.', 'raw': '- Hamel Husain for continually mentioning that people should look at their data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Philipp Schmid for his code for ONNX feature-extractors', 'raw': '- Philipp Schmid for his code for ONNX feature-extractors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Ben Burtenshaw for the first PR', 'raw': '- Ben Burtenshaw for the first PR'}]","📣 Introducing Dataset Viber: your chill repo for data collection, annotation and vibe checks! 🎉 + +I've cooked up Dataset Viber, a set of cool tools designed to make data preparation for AI models easier, more approachable and enjoyable for standalone AI engineers and enthusiasts. + +🔧 What Dataset Viber offers: +- CollectorInterface: Lazily collect model interaction data without human annotation +- AnnotatorInterface: Annotate your data with models in the loop +- BulkInterface: Explore data distribution and annotate in bulk +- Embedder: Efficiently embed data with ONNX-optimized speeds + +🎯 Key features: +- Supports various tasks for text, chat, and image modalities +- Runs in .ipynb notebooks +- Logs data to local CSV or directly to Hugging Face Hub +- Easy to install via pip: `pip install dataset-viber` + +It's not designed for team collaboration or production use, but rather as a fun and efficient toolkit for individual projects. + +Want to give it a try? 
Check out the repository link https://github.com/davidberenstein1957/dataset-viber/. + +I'm excited to hear your feedback and learn how you vibe with your data. Feel free to open an issue or reach out if you have any questions or suggestions! + +Some shoutouts: +- Gradio for the amazing backbone +- Daniel van Strien for some initial presentations I did on vibe checks +- Emily Omier for the workshop on structuring GitHub repo READMEs +- Hamel Husain for continually mentioning that people should look at their data. +- Philipp Schmid for his code for ONNX feature-extractors +- Ben Burtenshaw for the first PR","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/634ff41ff32062e9eb7b06a3/IwdjAcSyh4csCrY7VABng.mp4'}]",[],"[{'reaction': '👍', 'users': ['ajibawa-2023', 'Norod78', 'megoyaw3', 'Best-codes', 'Bruhn', 'osanseviero', 'Winnougan'], 'count': 7}, {'reaction': '👀', 'users': ['John6666', 'Hous94', 'megoyaw3', 'Best-codes'], 'count': 4}, {'reaction': '❤️', 'users': ['Best-codes', 'maywell', 'osanseviero', 'OrigamiDream'], 'count': 4}]",2024-08-15 07:54:24,2024-08-15 12:28:33.093,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}]",/posts/davidberenstein1957/575318853216493,1775,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,237798706580957,"[{'type': 'text', 'value': 'Remember when ', 'raw': 'Remember when '}, {'type': 'mention', 'user': 'mistralAI', 'raw': '@mistralAI'}, {'type': 'text', 'value': ' said large enough and casually dropped Mistral-Large-Instruct-2407? 🤯🚀', 'raw': ' said large enough and casually dropped Mistral-Large-Instruct-2407? 🤯🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's now on "", 'raw': ""It's now on ""}, {'type': 'link', 'href': 'http://lmsys.org', 'raw': 'http://lmsys.org'}, {'type': 'text', 'value': '! 🌐 It works amazingly well for instruction following, hard prompts, coding, and longer queries with only 123 billion parameters. 💡💻', 'raw': '! 🌐 It works amazingly well for instruction following, hard prompts, coding, and longer queries with only 123 billion parameters. 💡💻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It outperforms GPT4-Turbo and Claude 3 Opus on Coding, Hard Prompts, Math, and Longer Query categories. 📈🔢', 'raw': 'It outperforms GPT4-Turbo and Claude 3 Opus on Coding, Hard Prompts, Math, and Longer Query categories. 📈🔢'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It also outperforms Llama 3.1 405B on Instruction Following while being 3x smaller. 🐎🔍', 'raw': 'It also outperforms Llama 3.1 405B on Instruction Following while being 3x smaller. 🐎🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It also does exceedingly well on the Ai2 ZebraLogic logical reasoning benchmark despite being much smaller than the other models. 
🦓🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mistral is not here to take part but to take over! 🏆🌟', 'raw': 'Mistral is not here to take part but to take over! 🏆🌟'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'link', 'href': 'https://mistral.ai/news/mistral-large-2407/', 'raw': 'https://mistral.ai/news/mistral-large-2407/'}]","Remember when @mistralAI said large enough and casually dropped Mistral-Large-Instruct-2407? 🤯🚀 + +It's now on http://lmsys.org! 🌐 It works amazingly well for instruction following, hard prompts, coding, and longer queries with only 123 billion parameters. 💡💻 + +It outperforms GPT4-Turbo and Claude 3 Opus on Coding, Hard Prompts, Math, and Longer Query categories. 📈🔢 + +It also outperforms Llama 3.1 405B on Instruction Following while being 3x smaller. 🐎🔍 + +It also does exceedingly well on the Ai2 ZebraLogic logical reasoning benchmark despite being much smaller than the other models. 🦓🤔 + +Mistral is not here to take part but to take over! 🏆🌟 + +Model: https://mistral.ai/news/mistral-large-2407/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/kDjE4vVAZiQl8xRhzGyJV.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/XkLj-gyGSUb7RbBPJDgAU.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/zWxn998ipRvAL947rld2i.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/XA8TXjgwoG76kN47tm6tl.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/LcdGnB4OkILnFjch4TqPP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/tqEYRVdNwG_oYZlwUsM84.png'}]",[],"[{'reaction': '🚀', 'users': ['prithivMLmods'], 'count': 1}]",2024-08-08 09:13:56,2024-08-08 09:13:56.197,[],/posts/singhsidhukuldeep/237798706580957,649,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e2f1cb4dbf9514fb475b48/0EwhfSfMCy8P2e7nJWaOO.jpeg,41.0,Rico Ardiansyah,Blane187,770990891104334,"[{'type': 'text', 'value': 'hello everyone, today I have been working on a project ', 'raw': 'hello everyone, today I have been working on a project '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Blane187/rvc-demo'}, 'url': 'https://huggingface.co/spaces/Blane187/rvc-demo', 'raw': 'https://huggingface.co/spaces/Blane187/rvc-demo'}, {'type': 'text', 'value': "", a demo of rvc using pip, this project is still a demo though (I don't have a beta tester lol)"", 'raw': "", a demo of rvc using pip, this project is still a demo though (I don't have a beta tester lol)""}]","hello everyone, today I have been working on a project https://huggingface.co/spaces/Blane187/rvc-demo, a demo of rvc using pip, this project is still a demo though (I don't have a beta tester lol)",[],[],"[{'reaction': '👍', 'users': ['John6666'], 'count': 1}]",2024-08-08 00:36:03,2024-08-12 06:51:38.252,[],/posts/Blane187/770990891104334,1456,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/631b7370bf1351ed2bd0abdc/p0ZRMjgp5mt3sT5OhdHsp.png,17.0,Jonathan Lorraine,lorraine2,588623583782274,"[{'type': 'text', 'value': '⚡ My PhD thesis, “Scalable Nested Optimization for Deep Learning,” is now 
on arXiv! ⚡', 'raw': '⚡ My PhD thesis, “Scalable Nested Optimization for Deep Learning,” is now on arXiv! ⚡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'tl;dr: We develop various optimization tools with highlights, including:', 'raw': 'tl;dr: We develop various optimization tools with highlights, including:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '· Making the momentum coefficient complex for adversarial games like GANs.', 'raw': '· Making the momentum coefficient complex for adversarial games like GANs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '· Optimizing millions of hyperparameters using implicit differentiation.', 'raw': '· Optimizing millions of hyperparameters using implicit differentiation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '· Tuning hyperparameters using hypernetworks.', 'raw': '· Tuning hyperparameters using hypernetworks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '· Differentiably finding bifurcations in optimization for diverse solutions.', 'raw': '· Differentiably finding bifurcations in optimization for diverse solutions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://arxiv.org/abs/2407.01526', 'raw': 'https://arxiv.org/abs/2407.01526'}]","⚡ My PhD thesis, “Scalable Nested Optimization for Deep Learning,” is now on arXiv! ⚡ + +tl;dr: We develop various optimization tools with highlights, including: +· Making the momentum coefficient complex for adversarial games like GANs. +· Optimizing millions of hyperparameters using implicit differentiation. +· Tuning hyperparameters using hypernetworks. +· Differentiably finding bifurcations in optimization for diverse solutions. + +https://arxiv.org/abs/2407.01526",[],[],"[{'reaction': '🔥', 'users': ['nbroad', 'ajibawa-2023', 'merve', 'MexIvanov', 'rkusch', 'Joseph717171', 'osanseviero', 'lorraine2', 'not-lain', 'fsaudm'], 'count': 10}, {'reaction': '👍', 'users': ['Jaward', 'merve', 'MexIvanov', 'Joseph717171', 'Aurelien-Morgan', 'lorraine2', 'not-lain'], 'count': 7}]",2024-08-07 21:26:07,2024-08-09 08:04:08.535,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}]",/posts/lorraine2/588623583782274,2715,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,116037709746691,"[{'type': 'text', 'value': ""Is your summer reading list still empty? Curious if an LLM can generate a book blurb you'd enjoy and help build a KTO preference dataset at the same time? "", 'raw': ""Is your summer reading list still empty? Curious if an LLM can generate a book blurb you'd enjoy and help build a KTO preference dataset at the same time? 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A demo using Hugging Face Spaces and Gradio to collect LLM output preferences: ', 'raw': 'A demo using Hugging Face Spaces and Gradio to collect LLM output preferences: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'davanstrien/would-you-read-it'}, 'url': 'https://huggingface.co/spaces/davanstrien/would-you-read-it', 'raw': 'https://huggingface.co/spaces/davanstrien/would-you-read-it'}]","Is your summer reading list still empty? Curious if an LLM can generate a book blurb you'd enjoy and help build a KTO preference dataset at the same time? + +A demo using Hugging Face Spaces and Gradio to collect LLM output preferences: https://huggingface.co/spaces/davanstrien/would-you-read-it","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60107b385ac3e86b3ea4fc34/9UU_lKtzxAwllYZs3RqfD.mp4'}]",[],"[{'reaction': '❤️', 'users': ['prithivMLmods', 'joey00072', 'nbroad', 'Saugatkafley', 'maywell', 'Ramikan-BR', 'philipp-zettl', 'GPT007', 'osanseviero'], 'count': 9}, {'reaction': '👀', 'users': ['SicariusSicariiStuff'], 'count': 1}]",2024-08-07 14:37:24,2024-08-08 23:35:32.374,"[{'_id': '6569216f9c96f1a47bf45788', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6569216f9c96f1a47bf45788/mCLqmAs4dOjKdxNQVAp1w.png', 'fullname': 'Sica Rius', 'name': 'SicariusSicariiStuff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 388, 'isFollowing': False}]",/posts/davanstrien/116037709746691,3167,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63444f2687964b331809eb55/WvZivsvKsM_t0tBtakovK.png,89.0,t.d.a.g.,sequelbox,939202255393950,"[{'type': 'text', 'value': 'Just released: Shining Valiant 2 for Llama 3.1 8b!', 'raw': 'Just released: Shining Valiant 2 for Llama 3.1 8b!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- the first SV at 8b size, using the best 8b model', 'raw': '- the first SV at 8b size, using the best 8b model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- newest version of the SV dataset improves specialist knowledge and response consistency', 'raw': '- newest version of the SV dataset improves specialist knowledge and response consistency'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3.1 70b will be coming but our next releases will focus on expanding the Build Tools lineup. Get ready for some open-source synthetic datasets made with 3.1 405, coming VERY soon :)', 'raw': '3.1 70b will be coming but our next releases will focus on expanding the Build Tools lineup. Get ready for some open-source synthetic datasets made with 3.1 405, coming VERY soon :)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'get SV2: ', 'raw': 'get SV2: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ValiantLabs/Llama3.1-8B-ShiningValiant2'}, 'url': 'https://huggingface.co/ValiantLabs/Llama3.1-8B-ShiningValiant2', 'raw': 'https://huggingface.co/ValiantLabs/Llama3.1-8B-ShiningValiant2'}, {'type': 'new_line', 'raw': '\n'}]","Just released: Shining Valiant 2 for Llama 3.1 8b! 
+ +- the first SV at 8b size, using the best 8b model +- newest version of the SV dataset improves specialist knowledge and response consistency + +3.1 70b will be coming but our next releases will focus on expanding the Build Tools lineup. Get ready for some open-source synthetic datasets made with 3.1 405, coming VERY soon :) + +get SV2: https://huggingface.co/ValiantLabs/Llama3.1-8B-ShiningValiant2 +",[],[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'nbroad', 'merve'], 'count': 3}]",2024-08-07 13:31:09,2024-08-09 00:19:40.546,"[{'_id': '6569216f9c96f1a47bf45788', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6569216f9c96f1a47bf45788/mCLqmAs4dOjKdxNQVAp1w.png', 'fullname': 'Sica Rius', 'name': 'SicariusSicariiStuff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 388, 'isFollowing': False}]",/posts/sequelbox/939202255393950,1702,,1 +/avatars/a680a89b842560454720e14be412409e.svg,,Alex Rus,gadget01,428754145152300,"[{'type': 'mention', 'user': 'warmshao', 'raw': '@warmshao'}, {'type': 'text', 'value': ' Hello. do you do any paid consulting?', 'raw': ' Hello. do you do any paid consulting?'}]",@warmshao Hello. do you do any paid consulting?,[],"[{'_id': '6669ad939f5409d36228b8d3', 'avatarUrl': '/avatars/0f6736c80b1fbedd550e440d85f959dd.svg', 'fullname': 'wenshao', 'name': 'warmshao', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}]",[],2024-08-07 12:52:13,2024-08-07 12:57:14.876,"[{'_id': '6669ad939f5409d36228b8d3', 'avatarUrl': '/avatars/0f6736c80b1fbedd550e440d85f959dd.svg', 'fullname': 'wenshao', 'name': 'warmshao', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}]",/posts/gadget01/428754145152300,589,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/60f2fc91b92afccb7c34b8ed/W2-Nay12Ef4Ltyaf8EKE9.jpeg,118.0,Gabriel Martín Blázquez,gabrielmbmb,323184025352600,"[{'type': 'text', 'value': 'distilabel 1.3.0 is out! This release contains many core improvements and new tasks that help us build ', 'raw': 'distilabel 1.3.0 is out! This release contains many core improvements and new tasks that help us build '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'argilla/magpie-ultra-v0.1'}, 'url': 'https://huggingface.co/datasets/argilla/magpie-ultra-v0.1', 'raw': 'https://huggingface.co/datasets/argilla/magpie-ultra-v0.1'}, {'type': 'text', 'value': '!', 'raw': '!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Distributed pipeline execution with Ray, new Magpie tasks, reward models, components for dataset diversity based on sentence embeddings, Argilla 2.0 compatibility and many more features!', 'raw': 'Distributed pipeline execution with Ray, new Magpie tasks, reward models, components for dataset diversity based on sentence embeddings, Argilla 2.0 compatibility and many more features!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check the new release in GitHub: ', 'raw': 'Check the new release in GitHub: '}, {'type': 'link', 'href': 'https://github.com/argilla-io/distilabel', 'raw': 'https://github.com/argilla-io/distilabel'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","distilabel 1.3.0 is out! 
This release contains many core improvements and new tasks that help us build https://huggingface.co/datasets/argilla/magpie-ultra-v0.1! + +Distributed pipeline execution with Ray, new Magpie tasks, reward models, components for dataset diversity based on sentence embeddings, Argilla 2.0 compatibility and many more features! + +Check the new release in GitHub: https://github.com/argilla-io/distilabel + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f2fc91b92afccb7c34b8ed/ZBkhqcOozgq72H0qQM1yX.png'}]",[],"[{'reaction': '❤️', 'users': ['plaguss', 'nataliaElv', 'sdiazlor', 'KvrParaskevi', 'anakin87', 'not-lain', 'osanseviero'], 'count': 7}, {'reaction': '🚀', 'users': ['gabrielmbmb', 'sdiazlor', 'not-lain', 'louisbrulenaudet'], 'count': 4}, {'reaction': '🔥', 'users': ['prithivMLmods', 'not-lain'], 'count': 2}]",2024-08-07 10:12:33,2024-08-07 10:12:33.695,[],/posts/gabrielmbmb/323184025352600,2929,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/640e21ef3c82bd463ee5a76d/nVR1DFPAsiLw6Boys28Rb.jpeg,112.0,Dana Aubakirova,danaaubakirova,387162624073548,"[{'type': 'text', 'value': ' 🚀 We are thrilled to introduce TextImage Data Augmentation, developed in collaboration with Albumentations AI! ✨ This multimodal technique modifies document images and text simultaneously, enhancing Vision Language Models (VLMs) for high-text datasets.', 'raw': ' 🚀 We are thrilled to introduce TextImage Data Augmentation, developed in collaboration with Albumentations AI! ✨ This multimodal technique modifies document images and text simultaneously, enhancing Vision Language Models (VLMs) for high-text datasets.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👩\u200d💻 Learn how this innovative approach can improve your document AI projects by checking out our full blog post here: ', 'raw': '👩\u200d💻 Learn how this innovative approach can improve your document AI projects by checking out our full blog post here: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/doc_aug_hf_alb', 'raw': 'https://huggingface.co/blog/doc_aug_hf_alb'}]"," 🚀 We are thrilled to introduce TextImage Data Augmentation, developed in collaboration with Albumentations AI! ✨ This multimodal technique modifies document images and text simultaneously, enhancing Vision Language Models (VLMs) for high-text datasets. 
+ +👩‍💻 Learn how this innovative approach can improve your document AI projects by checking out our full blog post here: https://huggingface.co/blog/doc_aug_hf_alb",[],[],"[{'reaction': '🔥', 'users': ['ajibawa-2023'], 'count': 1}, {'reaction': '❤️', 'users': ['ijohn07'], 'count': 1}]",2024-08-07 06:18:54,2024-08-08 04:29:01.150,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}]",/posts/danaaubakirova/387162624073548,1440,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,558658964736826,"[{'type': 'text', 'value': '🗓️ Remember when last April, ', 'raw': '🗓️ Remember when last April, '}, {'type': 'mention', 'user': 'Meta', 'raw': '@Meta'}, {'type': 'text', 'value': ' released Segment Anything Model (SAM) paper and it was too good to be true. 🤯', 'raw': ' released Segment Anything Model (SAM) paper and it was too good to be true. 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""They have now released Segment Anything Model 2 (SAM 2) and it's mind-blowingly great! 🚀"", 'raw': ""They have now released Segment Anything Model 2 (SAM 2) and it's mind-blowingly great! 🚀""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SAM 2 is the first unified model for segmenting objects across images and videos. You can use a click, box, or mask as the input to select an object on any image or frame of video. 🖼️📹', 'raw': 'SAM 2 is the first unified model for segmenting objects across images and videos. You can use a click, box, or mask as the input to select an object on any image or frame of video. 🖼️📹'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SAM consists of an image encoder to encode images, a prompt encoder to encode prompts, then outputs of these two are given to a mask decoder to generate masks. 🎭', 'raw': 'SAM consists of an image encoder to encode images, a prompt encoder to encode prompts, then outputs of these two are given to a mask decoder to generate masks. 🎭'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The biggest jump of SAM2 from SAM is using memory to have consistent masking across frames! They call it masklet prediction! 🧠', 'raw': 'The biggest jump of SAM2 from SAM is using memory to have consistent masking across frames! They call it masklet prediction! 🧠'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'They have also released the dataset, SA-V ', 'raw': 'They have also released the dataset, SA-V '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This dataset is truly huge, with 190.9K manual annotations and 451.7K automatic! 📊', 'raw': 'This dataset is truly huge, with 190.9K manual annotations and 451.7K automatic! 
📊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'link', 'href': 'https://ai.meta.com/research/publications/sam-2-segment-anything-in-images-and-videos/', 'raw': 'https://ai.meta.com/research/publications/sam-2-segment-anything-in-images-and-videos/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Blog: ', 'raw': '📝 Blog: '}, {'type': 'link', 'href': 'https://ai.meta.com/sam2/', 'raw': 'https://ai.meta.com/sam2/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Demo: ', 'raw': '🔗 Demo: '}, {'type': 'link', 'href': 'https://sam2.metademolab.com/demo', 'raw': 'https://sam2.metademolab.com/demo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💾 Model Weights: ', 'raw': '💾 Model Weights: '}, {'type': 'link', 'href': 'https://github.com/facebookresearch/segment-anything-2/blob/main/checkpoints/download_ckpts.sh', 'raw': 'https://github.com/facebookresearch/segment-anything-2/blob/main/checkpoints/download_ckpts.sh'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Dataset: ', 'raw': '📁 Dataset: '}, {'type': 'link', 'href': 'https://ai.meta.com/datasets/segment-anything-video-downloads/', 'raw': 'https://ai.meta.com/datasets/segment-anything-video-downloads/'}]","🗓️ Remember when last April, @Meta released Segment Anything Model (SAM) paper and it was too good to be true. 🤯 + +They have now released Segment Anything Model 2 (SAM 2) and it's mind-blowingly great! 🚀 + +SAM 2 is the first unified model for segmenting objects across images and videos. You can use a click, box, or mask as the input to select an object on any image or frame of video. 🖼️📹 + +SAM consists of an image encoder to encode images, a prompt encoder to encode prompts, then outputs of these two are given to a mask decoder to generate masks. 🎭 + +The biggest jump of SAM2 from SAM is using memory to have consistent masking across frames! They call it masklet prediction! 🧠 + +They have also released the dataset, SA-V +This dataset is truly huge, with 190.9K manual annotations and 451.7K automatic! 
📊 + +📄 Paper: https://ai.meta.com/research/publications/sam-2-segment-anything-in-images-and-videos/ + +📝 Blog: https://ai.meta.com/sam2/ + +🔗 Demo: https://sam2.metademolab.com/demo + +💾 Model Weights: https://github.com/facebookresearch/segment-anything-2/blob/main/checkpoints/download_ckpts.sh + +📁 Dataset: https://ai.meta.com/datasets/segment-anything-video-downloads/","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/pD6w28UJkn4hUprT_ryga.mp4'}]","[{'_id': '61e8c67cee1e1440121f0240', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61e8c67cee1e1440121f0240/7akLZSb6xJZzbnl0o9Cod.jpeg', 'fullname': 'Jonas', 'name': 'Meta', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9}]","[{'reaction': '🔥', 'users': ['louisbrulenaudet', 'Ramikan-BR', 'NHLOCAL', 'Glavin001', 'nbroad', 'ajibawa-2023', 'myjyjy', 'osanseviero'], 'count': 8}]",2024-08-06 22:15:44,2024-08-08 01:00:17.544,"[{'_id': '6215ce9abfcb3893344dd0a2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6215ce9abfcb3893344dd0a2/3afbq35YvIPjbNFi-J7TO.png', 'fullname': 'Cross', 'name': 'dillfrescott', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 53, 'isFollowing': False}]",/posts/singhsidhukuldeep/558658964736826,2185,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/648a374f00f7a3374ee64b99/YPwSOrronoozwHbJchPn3.jpeg,246.0,Caleb Fahlgren,cfahlgren1,989495268511956,"[{'type': 'text', 'value': 'You can now embed your heatmap anywhere with a simple change :)', 'raw': 'You can now embed your heatmap anywhere with a simple change :)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Currently just supports model creation! You can duplicate the space and create your own here:', 'raw': 'Currently just supports model creation! You can duplicate the space and create your own here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'cfahlgren1/my-heatmap'}, 'url': 'https://huggingface.co/spaces/cfahlgren1/my-heatmap', 'raw': 'https://huggingface.co/spaces/cfahlgren1/my-heatmap'}, {'type': 'new_line', 'raw': '\n'}]","You can now embed your heatmap anywhere with a simple change :) + +Currently just supports model creation! You can duplicate the space and create your own here: + +https://huggingface.co/spaces/cfahlgren1/my-heatmap +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/648a374f00f7a3374ee64b99/G_XPKyQidDIEd8BbNSOx_.png'}]",[],"[{'reaction': '👀', 'users': ['Nymbo'], 'count': 1}]",2024-08-06 20:22:43,2024-08-06 21:36:01.520,"[{'_id': '648a374f00f7a3374ee64b99', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/648a374f00f7a3374ee64b99/YPwSOrronoozwHbJchPn3.jpeg', 'fullname': 'Caleb Fahlgren', 'name': 'cfahlgren1', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 246, 'isFollowing': False}]",/posts/cfahlgren1/989495268511956,1051,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg,32.0,Firstname Lastname,takeraparterer,328474988002537,"[{'type': 'text', 'value': 'They should make a thing like google colab but you can have unlimited free access to a whole datacenter that would be cool. 
like if you agree', 'raw': 'They should make a thing like google colab but you can have unlimited free access to a whole datacenter that would be cool. like if you agree'}]",They should make a thing like google colab but you can have unlimited free access to a whole datacenter that would be cool. like if you agree,[],[],"[{'reaction': '❤️', 'users': ['takeraparterer', 'hawaee', 'holooo', 'ashercn97', 'Blane187', 'jnh-ordbogen'], 'count': 6}, {'reaction': '➕', 'users': ['VOiceOwl', 'thethinkmachine', 'takeraparterer'], 'count': 3}]",2024-07-31 08:41:42,2024-08-05 20:48:13.236,"[{'_id': '63de560a15266dd945f209ca', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63de560a15266dd945f209ca/PeZf3IF-x7Qh8OcnKH12R.png', 'fullname': 'MrDragonFox', 'name': 'MrDragonFox', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 95, 'isFollowing': False}, {'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}, {'_id': '62a3bb1cd0d8c2c2169f0b88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg', 'fullname': 'Joseph [open/acc] Pollack', 'name': 'Tonic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 415, 'isFollowing': False}, {'_id': '64274b69ba6cef0a6ebb0fd6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png', 'fullname': 'Richard A Aragon', 'name': 'TuringsSolutions', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}]",/posts/takeraparterer/328474988002537,2139,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,238095044062491,"[{'type': 'text', 'value': '𝗧𝗵𝗲 𝗵𝘂𝗴𝗲 𝗰𝗼𝘀𝘁 𝗼𝗳 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝗼𝗻 𝗳𝗿𝗼𝗻𝘁𝗶𝗲𝗿 𝗟𝗟𝗠𝘀 💸', 'raw': '𝗧𝗵𝗲 𝗵𝘂𝗴𝗲 𝗰𝗼𝘀𝘁 𝗼𝗳 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝗼𝗻 𝗳𝗿𝗼𝗻𝘁𝗶𝗲𝗿 𝗟𝗟𝗠𝘀 💸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Google DeepMind recently released a great paper that shows optimal hyperparameters to train across different regimes: Scaling Exponents Across Parameterizations and Optimizers, with data from 10,000 training runs.', 'raw': 'Google DeepMind recently released a great paper that shows optimal hyperparameters to train across different regimes: Scaling Exponents Across Parameterizations and Optimizers, with data from 10,000 training runs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'One engineer decided to quantify the price of such a large-scale experiment.', 'raw': 'One engineer decided to quantify the price of such a large-scale experiment.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '😬 And the bill is hefty: ~13M USD ', 'raw': '😬 And the bill is hefty: ~13M USD '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This exact number is to take with a grain of salt because many approximations were necessary to get the final result.', 'raw': 'This exact number is to take with a grain of salt because many 
approximations were necessary to get the final result.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⛔️ But still this ballpark means that for this sole experiment, the price is way over what most startups or research labs could afford.', 'raw': '⛔️ But still this ballpark means that for this sole experiment, the price is way over what most startups or research labs could afford.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This means that open-sourcing research is more important than ever, to put everyone in the ecosystem on a roughly equal footing. Don't let OpenAI run first, they'll keep everything for themselves!"", 'raw': ""This means that open-sourcing research is more important than ever, to put everyone in the ecosystem on a roughly equal footing. Don't let OpenAI run first, they'll keep everything for themselves!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Read the full post that quantifies the paper's cost 👉 "", 'raw': ""Read the full post that quantifies the paper's cost 👉 ""}, {'type': 'link', 'href': 'https://152334h.github.io/blog/scaling-exponents/', 'raw': 'https://152334h.github.io/blog/scaling-exponents/'}]","𝗧𝗵𝗲 𝗵𝘂𝗴𝗲 𝗰𝗼𝘀𝘁 𝗼𝗳 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝗼𝗻 𝗳𝗿𝗼𝗻𝘁𝗶𝗲𝗿 𝗟𝗟𝗠𝘀 💸 + +Google DeepMind recently released a great paper that shows optimal hyperparameters to train across different regimes: Scaling Exponents Across Parameterizations and Optimizers, with data from 10,000 training runs. + +One engineer decided to quantify the price of such a large-scale experiment. + +😬 And the bill is hefty: ~13M USD + +This exact number is to take with a grain of salt because many approximations were necessary to get the final result. + +⛔️ But still this ballpark means that for this sole experiment, the price is way over what most startups or research labs could afford. + +This means that open-sourcing research is more important than ever, to put everyone in the ecosystem on a roughly equal footing. Don't let OpenAI run first, they'll keep everything for themselves! 
+ +Read the full post that quantifies the paper's cost 👉 https://152334h.github.io/blog/scaling-exponents/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/ycHLDaDRYPEfxP50yTJV8.png'}]",[],"[{'reaction': '👀', 'users': ['nbroad', 'MicPie', 'louisbrulenaudet', 'adamo1139', 'victor'], 'count': 5}, {'reaction': '🔥', 'users': ['pduf'], 'count': 1}]",2024-07-30 15:08:55,2024-07-31 06:31:41.589,"[{'_id': '66a8985e5bd5c24dec21f0bd', 'avatarUrl': '/avatars/7411dc98c012221f8e4c3641c8702640.svg', 'fullname': 'juliawhites', 'name': 'juliawhites', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/m-ric/238095044062491,2262,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1669551186189-63732ebbbd81fae2b3aaf3fb.jpeg,296.0,Knut Jägersberg,KnutJaegersberg,675597604860526,"[{'type': 'text', 'value': 'neuralmagic/Meta-Llama-3.1-405B-Instruct-FP8', 'raw': 'neuralmagic/Meta-Llama-3.1-405B-Instruct-FP8'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Requant of the big llama, using 20% less memory ', 'raw': 'Requant of the big llama, using 20% less memory '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/neuralmagic/Meta-Llama-3.1-405B-Instruct-FP8', 'raw': 'https://huggingface.co/neuralmagic/Meta-Llama-3.1-405B-Instruct-FP8'}]","neuralmagic/Meta-Llama-3.1-405B-Instruct-FP8 + +Requant of the big llama, using 20% less memory + +https://huggingface.co/neuralmagic/Meta-Llama-3.1-405B-Instruct-FP8",[],[],[],2024-07-30 15:03:52,2024-07-30 15:03:52.836,[],/posts/KnutJaegersberg/675597604860526,925,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1649681653581-5f7fbd813e94f16a85448745.jpeg,657.0,Sayak Paul,sayakpaul,772170903237816,"[{'type': 'text', 'value': ""With larger and larger diffusion transformers coming up, it's becoming increasingly important to have some good quantization tools for them."", 'raw': ""With larger and larger diffusion transformers coming up, it's becoming increasingly important to have some good quantization tools for them.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We present our findings from a series of experiments on quantizing different diffusion pipelines based on diffusion transformers. ', 'raw': 'We present our findings from a series of experiments on quantizing different diffusion pipelines based on diffusion transformers. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We demonstrate excellent memory savings with a bit of sacrifice on inference latency which is expected to improve in the coming days. ', 'raw': 'We demonstrate excellent memory savings with a bit of sacrifice on inference latency which is expected to improve in the coming days. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Diffusers 🤝 Quanto ❤️', 'raw': 'Diffusers 🤝 Quanto ❤️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This was a juicy collaboration between ', 'raw': 'This was a juicy collaboration between '}, {'type': 'mention', 'user': 'dacorvo', 'raw': '@dacorvo'}, {'type': 'text', 'value': ' and myself. ', 'raw': ' and myself. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the post to learn all about it', 'raw': 'Check out the post to learn all about it'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/quanto-diffusers', 'raw': 'https://huggingface.co/blog/quanto-diffusers'}]","With larger and larger diffusion transformers coming up, it's becoming increasingly important to have some good quantization tools for them. + +We present our findings from a series of experiments on quantizing different diffusion pipelines based on diffusion transformers. + +We demonstrate excellent memory savings with a bit of sacrifice on inference latency which is expected to improve in the coming days. + +Diffusers 🤝 Quanto ❤️ + +This was a juicy collaboration between @dacorvo and myself. + +Check out the post to learn all about it +https://huggingface.co/blog/quanto-diffusers","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f7fbd813e94f16a85448745/eAQbBBt41KngCcHYhT0iv.png'}]","[{'_id': '647995564be04c76ce4547b3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/647995564be04c76ce4547b3/KpP0yuQMsqb-z6N9h4Ykg.jpeg', 'fullname': 'David Corvoysier', 'name': 'dacorvo', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 52}]","[{'reaction': '👀', 'users': ['John6666', 'victor', 'thliang01', 'TuringsSolutions', 'YaTharThShaRma999', 'not-lain', 'louisbrulenaudet', 'Hev832'], 'count': 8}, {'reaction': '🔥', 'users': ['YaTharThShaRma999', 'Bruhn', 'not-lain', 'EtienneDosSantos', 'rbgo'], 'count': 5}]",2024-07-30 04:26:59,2024-07-31 18:11:01.286,"[{'_id': '65acc58c14d782df067f759b', 'avatarUrl': '/avatars/52a153d04d325469e1be69bce610ebe5.svg', 'fullname': 'Tan Hong Kai', 'name': 'ecyht2', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '64274b69ba6cef0a6ebb0fd6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png', 'fullname': 'Richard A Aragon', 'name': 'TuringsSolutions', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}, {'_id': '6569216f9c96f1a47bf45788', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6569216f9c96f1a47bf45788/mCLqmAs4dOjKdxNQVAp1w.png', 'fullname': 'Sica Rius', 'name': 'SicariusSicariiStuff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 388, 'isFollowing': False}]",/posts/sayakpaul/772170903237816,3842,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,522286906747249,"[{'type': 'text', 'value': 'Kling AI Video is FINALLY Public (All Countries), Free to Use and MIND BLOWING - Full Tutorial > ', 'raw': 'Kling AI Video is FINALLY Public (All Countries), Free to Use and MIND BLOWING - Full Tutorial > '}, {'type': 'link', 'href': 'https://youtu.be/zcpqAxYV1_w', 'raw': 'https://youtu.be/zcpqAxYV1_w'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""You probably seen those mind blowing AI made videos. And the day has arrived. The famous Kling AI is now worldwide available for free. 
In this tutorial video I will show you how to register for free with just email to Kling AI and use its mind blowing text to video animation, image to video animation and text to image, and image to image capabilities. This video will show you non-cherry pick results so you will know the actual quality and capability of the model unlike those extremely cherry pick example demos. Still, #KlingAI is the only #AI model that competes with OpenAI's #SORA and it is real to use."", 'raw': ""You probably seen those mind blowing AI made videos. And the day has arrived. The famous Kling AI is now worldwide available for free. In this tutorial video I will show you how to register for free with just email to Kling AI and use its mind blowing text to video animation, image to video animation and text to image, and image to image capabilities. This video will show you non-cherry pick results so you will know the actual quality and capability of the model unlike those extremely cherry pick example demos. Still, #KlingAI is the only #AI model that competes with OpenAI's #SORA and it is real to use.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Kling AI Official Website ⤵️', 'raw': '🔗 Kling AI Official Website ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.klingai.com/', 'raw': 'https://www.klingai.com/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 SECourses Discord Channel to Get Full Support ⤵️', 'raw': '🔗 SECourses Discord Channel to Get Full Support ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388', 'raw': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Our GitHub Repository ⤵️', 'raw': '🔗 Our GitHub Repository ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://github.com/FurkanGozukara/Stable-Diffusion', 'raw': 'https://github.com/FurkanGozukara/Stable-Diffusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Our Reddit ⤵️', 'raw': '🔗 Our Reddit ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '▶️ ', 'raw': '▶️ '}, {'type': 'link', 'href': 'https://www.reddit.com/r/SECourses/', 'raw': 'https://www.reddit.com/r/SECourses/'}]","Kling AI Video is FINALLY Public (All Countries), Free to Use and MIND BLOWING - Full Tutorial > https://youtu.be/zcpqAxYV1_w + +You probably seen those mind blowing AI made videos. And the day has arrived. The famous Kling AI is now worldwide available for free. In this tutorial video I will show you how to register for free with just email to Kling AI and use its mind blowing text to video animation, image to video animation and text to image, and image to image capabilities. This video will show you non-cherry pick results so you will know the actual quality and capability of the model unlike those extremely cherry pick example demos. Still, #KlingAI is the only #AI model that competes with OpenAI's #SORA and it is real to use. 
+ +🔗 Kling AI Official Website ⤵️ +▶️ https://www.klingai.com/ + +🔗 SECourses Discord Channel to Get Full Support ⤵️ +▶️ https://discord.com/servers/software-engineering-courses-secourses-772774097734074388 + +🔗 Our GitHub Repository ⤵️ +▶️ https://github.com/FurkanGozukara/Stable-Diffusion + +🔗 Our Reddit ⤵️ +▶️ https://www.reddit.com/r/SECourses/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/iPhRhARt4OkHXk5di2889.png'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'ZeroWw', 'GPT007', 'amosgyamfi', 'onlinework', 'Ripp218', 'NeoTokyoUnicorn', 'John6666', 'petresil', 'rreed', 'cctuan', 'De4dBoris'], 'count': 12}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'Ripp218', 'GPT007', 'efecelik', 'De4dBoris'], 'count': 5}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'GPT007', 'abezzarg', 'De4dBoris'], 'count': 4}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'GPT007', 'danielus'], 'count': 3}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'GPT007', 'louisbrulenaudet'], 'count': 3}, {'reaction': '🤗', 'users': ['MonsterMMORPG', 'GPT007'], 'count': 2}, {'reaction': '😎', 'users': ['MonsterMMORPG', 'GPT007'], 'count': 2}, {'reaction': '➕', 'users': ['MonsterMMORPG', 'GPT007'], 'count': 2}, {'reaction': '🤝', 'users': ['MonsterMMORPG', 'GPT007'], 'count': 2}, {'reaction': '🤯', 'users': ['MonsterMMORPG', 'GPT007'], 'count': 2}, {'reaction': '🧠', 'users': ['MonsterMMORPG', 'GPT007'], 'count': 2}]",2024-07-30 00:53:00,2024-12-04 08:42:56.527,"[{'_id': '64a43122e3cf200cbf8a03b3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64a43122e3cf200cbf8a03b3/xilplM2M8Sjn3jLGyvwma.jpeg', 'fullname': 'Byte', 'name': 'CyberNative', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}, {'_id': '6345bd89fe134dfd7a0dba40', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg', 'fullname': 'Furkan Gözükara', 'name': 'MonsterMMORPG', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 646, 'isFollowing': False}, {'_id': '655a741fec17c88302d33961', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/oAbpDk8u836RDLtQCczTg.png', 'fullname': ' ', 'name': 'NeoTokyoUnicorn', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6720a5e3262e65053af19c1e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/VU_XmyTsoSN9IKYCnGuxI.jpeg', 'fullname': 'nguyen', 'name': 'toanhihi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6343c29f133ee9022f132d4b', 'avatarUrl': '/avatars/8244e1fb86f07e8876f99e6e5cb254fe.svg', 'fullname': 'Thomas Mak', 'name': 'Thomas001', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/MonsterMMORPG/522286906747249,10638,,6 +https://cdn-avatars.huggingface.co/v1/production/uploads/641a05a7f5e9c66105fec9b2/ipSL2GFTQHLdQqDQsCmPy.png,33.0,Artur Lauche,Artples,551816601997074,"[{'type': 'text', 'value': ""Looking for a combination of speed and quality? Look no further! I've created a space that merges Open WebUI's excellent interface and features with the lightning-fast performance of the Groq API. Experience top-tier models in no time. 
Try it out for free here:"", 'raw': ""Looking for a combination of speed and quality? Look no further! I've created a space that merges Open WebUI's excellent interface and features with the lightning-fast performance of the Groq API. Experience top-tier models in no time. Try it out for free here:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'L-AI/groq-chat'}, 'url': 'https://huggingface.co/spaces/L-AI/groq-chat', 'raw': 'https://huggingface.co/spaces/L-AI/groq-chat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '""A big thank you to Groq for providing their fantastic API at no cost!""', 'raw': '""A big thank you to Groq for providing their fantastic API at no cost!""'}]","Looking for a combination of speed and quality? Look no further! I've created a space that merges Open WebUI's excellent interface and features with the lightning-fast performance of the Groq API. Experience top-tier models in no time. Try it out for free here: +https://huggingface.co/spaces/L-AI/groq-chat + +""A big thank you to Groq for providing their fantastic API at no cost!""",[],[],"[{'reaction': '🚀', 'users': ['Artples', 'amosgyamfi', 'Ripp218', 'snakeying', 'ygmpkk', 'efecelik'], 'count': 6}, {'reaction': '❤️', 'users': ['ijohn07'], 'count': 1}]",2024-07-29 16:33:35,2024-07-29 16:33:35.989,[],/posts/Artples/551816601997074,2588,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/60a76b174e24361791fe822d/inEvYwrd4z0xvRQN3ikdE.jpeg,160.0,Sylvain Lesage,severo,968846619112965,"[{'type': 'mention', 'user': 'MaartenGr', 'raw': '@MaartenGr'}, {'type': 'text', 'value': ' nice post ', 'raw': ' nice post '}, {'type': 'link', 'href': 'https://newsletter.maartengrootendorst.com/p/a-visual-guide-to-quantization', 'raw': 'https://newsletter.maartengrootendorst.com/p/a-visual-guide-to-quantization'}, {'type': 'text', 'value': ' (""A Visual Guide to Quantization"")', 'raw': ' (""A Visual Guide to Quantization"")'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Would it make sense for you to publish it here too?', 'raw': 'Would it make sense for you to publish it here too?'}]","@MaartenGr nice post https://newsletter.maartengrootendorst.com/p/a-visual-guide-to-quantization (""A Visual Guide to Quantization"") + +Would it make sense for you to publish it here too?",[],"[{'_id': '62ea1ac3cc08a09aa6d3ec95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62ea1ac3cc08a09aa6d3ec95/_74xXYEYLLjNVJ9zQucfn.jpeg', 'fullname': 'Maarten Grootendorst', 'name': 'MaartenGr', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 28}]",[],2024-07-29 15:09:26,2024-07-30 07:38:52.530,"[{'_id': '62ea1ac3cc08a09aa6d3ec95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62ea1ac3cc08a09aa6d3ec95/_74xXYEYLLjNVJ9zQucfn.jpeg', 'fullname': 'Maarten Grootendorst', 'name': 'MaartenGr', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 28, 'isFollowing': False}]",/posts/severo/968846619112965,1125,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/63ca45c90609f1def7e2775a/mlxL5CKq0z9obRKKG_P-o.png,8.0,Saugat Kafley,Saugatkafley,403721444206150,"[{'type': 'text', 'value': 'I am looking for an annotation tool/software for video segmentation, landmarking and feature detection. 
', 'raw': 'I am looking for an annotation tool/software for video segmentation, landmarking and feature detection. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I bumped into xlabelAnything; however, I could not run it on my machine.', 'raw': 'I bumped into xlabelAnything; however, I could not run it on my machine.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Any recommendations?', 'raw': ' Any recommendations?'}, {'type': 'new_line', 'raw': '\n'}]","I am looking for an annotation tool/software for video segmentation, landmarking and feature detection. + +I bumped into xlabelAnything; however, I could not run it on my machine. + Any recommendations? +",[],[],[],2024-07-29 13:05:00,2024-07-30 04:19:37.516,"[{'_id': '66a749cc2646d31d751d9eaa', 'avatarUrl': '/avatars/e7266a995f9f36f25aa46cb0ce517e43.svg', 'fullname': 'whitesmaverick', 'name': 'whitesmaverick', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Saugatkafley/403721444206150,947,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1594214747713-5e9ecfc04957053f60648a3e.png,331.0,Quentin Lhoest,lhoestq,819761092372121,"[{'type': 'text', 'value': ""Hey! I'm working on a 100% synthetic Dataset Hub here (you can search for any kind of datasets and the app invents them). The link is here: "", 'raw': ""Hey! I'm working on a 100% synthetic Dataset Hub here (you can search for any kind of datasets and the app invents them). The link is here: ""}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'infinite-dataset-hub/infinite-dataset-hub'}, 'url': 'https://huggingface.co/spaces/infinite-dataset-hub/infinite-dataset-hub', 'raw': 'https://huggingface.co/spaces/infinite-dataset-hub/infinite-dataset-hub'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Question for the Community:', 'raw': 'Question for the Community:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Which models should I use to generate images and audio samples for those datasets? 🤗', 'raw': 'Which models should I use to generate images and audio samples for those datasets? 🤗'}]","Hey! I'm working on a 100% synthetic Dataset Hub here (you can search for any kind of datasets and the app invents them). The link is here: https://huggingface.co/spaces/infinite-dataset-hub/infinite-dataset-hub + +Question for the Community: + +Which models should I use to generate images and audio samples for those datasets? 
🤗",[],[],"[{'reaction': '🚀', 'users': ['victor', 'ashercn97', 'cfahlgren1', 'John6666', 'Hev832', 'Nymbo', 'efecelik', 'gabrielmbmb', 'drlordbasil', 'KingNish', 'GPT007', 'osanseviero', 'Jank-com'], 'count': 13}, {'reaction': '🔥', 'users': ['awacke1', 'Jank-com'], 'count': 2}]",2024-07-29 09:46:33,2024-07-31 11:21:01.159,"[{'_id': '5e9ecfc04957053f60648a3e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1594214747713-5e9ecfc04957053f60648a3e.png', 'fullname': 'Quentin Lhoest', 'name': 'lhoestq', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 331, 'isFollowing': False}, {'_id': '66200e8e808f53e419d702d8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/AXnwP_G2WkJ0gkBepd_t7.png', 'fullname': 'Marc Kovka', 'name': 'GPT007', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6, 'isFollowing': False}]",/posts/lhoestq/819761092372121,4198,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg,32.0,Firstname Lastname,takeraparterer,446225760997052,"[{'type': 'text', 'value': 'Just made this!', 'raw': 'Just made this!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/takeraparterer/Charformer', 'raw': 'https://github.com/takeraparterer/Charformer'}]","Just made this! +https://github.com/takeraparterer/Charformer",[],[],"[{'reaction': '👀', 'users': ['takeraparterer', 'victor', 'Hev832', 'John6666'], 'count': 4}, {'reaction': '❤️', 'users': ['takeraparterer', 'stefan-it', 'notlober'], 'count': 3}, {'reaction': '🔥', 'users': ['takeraparterer', 'efecelik'], 'count': 2}, {'reaction': '🚀', 'users': ['takeraparterer'], 'count': 1}, {'reaction': '😔', 'users': ['TuringsSolutions'], 'count': 1}]",2024-07-29 06:57:58,2024-08-05 22:15:53.957,"[{'_id': '64274b69ba6cef0a6ebb0fd6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png', 'fullname': 'Richard A Aragon', 'name': 'TuringsSolutions', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}, {'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}]",/posts/takeraparterer/446225760997052,3222,,20 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,347522688239080,"[{'type': 'text', 'value': '🙋🏻\u200d♂️ Hey there folks ', 'raw': '🙋🏻\u200d♂️ Hey there folks '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'made a demo for Nvidia Minitron on an A100. ', 'raw': 'made a demo for Nvidia Minitron on an A100. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Minitron is a family of small language models (SLMs) obtained by pruning NVIDIA's Nemotron-4 15B model. 
We prune model embedding size, attention heads, and MLP intermediate dimension, following which, we perform continued training with distillation to arrive at the final models."", 'raw': ""Minitron is a family of small language models (SLMs) obtained by pruning NVIDIA's Nemotron-4 15B model. We prune model embedding size, attention heads, and MLP intermediate dimension, following which, we perform continued training with distillation to arrive at the final models.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Deriving the Minitron 8B and 4B models from the base 15B model using our approach requires up to 40x fewer training tokens per model compared to training from scratch; this results in compute cost savings of 1.8x for training the full model family (15B, 8B, and 4B). Minitron models exhibit up to a 16% improvement in MMLU scores compared to training from scratch, perform comparably to other community models such as Mistral 7B, Gemma 7B and Llama-3 8B, and outperform state-of-the-art compression techniques from the literature. Please refer to our arXiv paper for more details.', 'raw': 'Deriving the Minitron 8B and 4B models from the base 15B model using our approach requires up to 40x fewer training tokens per model compared to training from scratch; this results in compute cost savings of 1.8x for training the full model family (15B, 8B, and 4B). Minitron models exhibit up to a 16% improvement in MMLU scores compared to training from scratch, perform comparably to other community models such as Mistral 7B, Gemma 7B and Llama-3 8B, and outperform state-of-the-art compression techniques from the literature. Please refer to our arXiv paper for more details.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Minitron models are for research and development only.', 'raw': 'Minitron models are for research and development only.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'source : ', 'raw': 'source : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nvidia/Minitron-8B-Base'}, 'url': 'https://huggingface.co/nvidia/Minitron-8B-Base', 'raw': 'https://huggingface.co/nvidia/Minitron-8B-Base'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'demo : ', 'raw': 'demo : '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Tonic/Minitron'}, 'url': 'https://huggingface.co/spaces/Tonic/Minitron', 'raw': 'https://huggingface.co/spaces/Tonic/Minitron'}]","🙋🏻‍♂️ Hey there folks + +made a demo for Nvidia Minitron on an A100. + +Minitron is a family of small language models (SLMs) obtained by pruning NVIDIA's Nemotron-4 15B model. We prune model embedding size, attention heads, and MLP intermediate dimension, following which, we perform continued training with distillation to arrive at the final models. + +Deriving the Minitron 8B and 4B models from the base 15B model using our approach requires up to 40x fewer training tokens per model compared to training from scratch; this results in compute cost savings of 1.8x for training the full model family (15B, 8B, and 4B). Minitron models exhibit up to a 16% improvement in MMLU scores compared to training from scratch, perform comparably to other community models such as Mistral 7B, Gemma 7B and Llama-3 8B, and outperform state-of-the-art compression techniques from the literature. 
Please refer to our arXiv paper for more details. + +Minitron models are for research and development only. + +source : https://huggingface.co/nvidia/Minitron-8B-Base +demo : https://huggingface.co/spaces/Tonic/Minitron",[],[],"[{'reaction': '👍', 'users': ['nicolay-r', 'TuringsSolutions', 'ZeroWw', 'clem', 'Nymbo', 'osanseviero', 'louisbrulenaudet', 'Tonic'], 'count': 8}, {'reaction': '❤️', 'users': ['clem', 'osanseviero'], 'count': 2}]",2024-07-23 09:55:50,2024-07-23 18:21:10.710,"[{'_id': '6569216f9c96f1a47bf45788', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6569216f9c96f1a47bf45788/mCLqmAs4dOjKdxNQVAp1w.png', 'fullname': 'Sica Rius', 'name': 'SicariusSicariiStuff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 388, 'isFollowing': False}]",/posts/Tonic/347522688239080,1724,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/656d73ed0bbc114fe6449704/gpteBU9GmKSHRVkRBUHld.png,34.0,Symbol-LLM,Symbol-LLM,608490648185683,"[{'type': 'text', 'value': '🔥Thrilled to release our 8B version of Symbol-LLM-Instruct ! ', 'raw': '🔥Thrilled to release our 8B version of Symbol-LLM-Instruct ! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It follows the two-stage training strategy proposed in the original paper and is continually optimized on LLaMA3-Chat-8B model.', 'raw': 'It follows the two-stage training strategy proposed in the original paper and is continually optimized on LLaMA3-Chat-8B model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Symbol-LLM was accepted by ACL'24 main conference ! See you in Thailand !"", 'raw': ""Symbol-LLM was accepted by ACL'24 main conference ! See you in Thailand !""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper link: ', 'raw': 'Paper link: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2311.09278', 'raw': 'https://arxiv.org/abs/2311.09278'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper Title: Symbol-LLM: Towards Foundational Symbol-centric Interface For Large Language Models', 'raw': 'Paper Title: Symbol-LLM: Towards Foundational Symbol-centric Interface For Large Language Models'}, {'type': 'new_line', 'raw': '\n'}]","🔥Thrilled to release our 8B version of Symbol-LLM-Instruct ! + +It follows the two-stage training strategy proposed in the original paper and is continually optimized on LLaMA3-Chat-8B model. + +Symbol-LLM was accepted by ACL'24 main conference ! See you in Thailand ! 
+ +Paper link: https://arxiv.org/abs/2311.09278 +Paper Title: Symbol-LLM: Towards Foundational Symbol-centric Interface For Large Language Models +",[],[],"[{'reaction': '🚀', 'users': ['Symbol-LLM', 'Xdotnet', 'Ramdevkijai', 'louisbrulenaudet', 'osanseviero'], 'count': 5}, {'reaction': '🔥', 'users': ['nicolay-r', 'ToKrCZ', 'osanseviero'], 'count': 3}, {'reaction': '🤗', 'users': ['Symbol-LLM'], 'count': 1}, {'reaction': '😎', 'users': ['ZeroWw'], 'count': 1}]",2024-07-23 08:19:31,2024-07-25 20:50:47.254,"[{'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}]",/posts/Symbol-LLM/608490648185683,2121,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/669c89e98f2dbc203f9e74ab/higvnXEHeo_Ig2bgTpn47.png,41.0,Vincent Granville,vincentg64,309759264789106,"[{'type': 'text', 'value': 'How to create custom LLMs from scratch', 'raw': 'How to create custom LLMs from scratch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'See my new podcast on this topic, at ', 'raw': 'See my new podcast on this topic, at '}, {'type': 'link', 'href': 'https://mltblog.com/3xS1bf5', 'raw': 'https://mltblog.com/3xS1bf5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Despite GPT, Claude, Gemini, Llama, and the host of other LLMs that we have access to, a variety of organizations are still exploring their options when it comes to custom LLMs. Logging in to ChatGPT is easy enough, and so is creating a ‘custom’ OpenAI GPT, but what does it take to create a truly custom LLM? When and why might this be useful, and will it be worth the effort?', 'raw': 'Despite GPT, Claude, Gemini, Llama, and the host of other LLMs that we have access to, a variety of organizations are still exploring their options when it comes to custom LLMs. Logging in to ChatGPT is easy enough, and so is creating a ‘custom’ OpenAI GPT, but what does it take to create a truly custom LLM? When and why might this be useful, and will it be worth the effort?'}]","How to create custom LLMs from scratch + +See my new podcast on this topic, at https://mltblog.com/3xS1bf5 + +Despite GPT, Claude, Gemini, Llama, and the host of other LLMs that we have access to, a variety of organizations are still exploring their options when it comes to custom LLMs. Logging in to ChatGPT is easy enough, and so is creating a ‘custom’ OpenAI GPT, but what does it take to create a truly custom LLM? 
When and why might this be useful, and will it be worth the effort?","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/669c89e98f2dbc203f9e74ab/0MywN8McYQVfu2fXSn_Mm.png'}]",[],"[{'reaction': '🚀', 'users': ['nicolay-r', 'victor', 'SFBAI'], 'count': 3}]",2024-07-23 06:10:05,2024-07-23 06:10:26.640,[],/posts/vincentg64/309759264789106,1238,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,111964148485189,"[{'type': 'text', 'value': 'LazyLLM - Unusual Collab (Apple & Meta) Yields Impactful Work', 'raw': 'LazyLLM - Unusual Collab (Apple & Meta) Yields Impactful Work'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLM inference typically consists of two stages: prefilling/tokenizing and decoding. In the prefilling stage, the model processes the entire input prompt, computing and caching key-value (KV) pairs for each token, which can be time-consuming for long prompts. This is followed by the decoding stage, where the model generates tokens sequentially, reusing the cached KVs. ', 'raw': 'LLM inference typically consists of two stages: prefilling/tokenizing and decoding. In the prefilling stage, the model processes the entire input prompt, computing and caching key-value (KV) pairs for each token, which can be time-consuming for long prompts. This is followed by the decoding stage, where the model generates tokens sequentially, reusing the cached KVs. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LazyLLM introduces a dynamic token pruning technique. Instead of computing KVs for all tokens during prefilling, LazyLLM selectively processes only the most important tokens based on attention scores, deferring less important ones to later steps if needed. It uses progressive token pruning across transformer layers and introduces an Aux Cache to store hidden states of pruned tokens. ', 'raw': 'LazyLLM introduces a dynamic token pruning technique. Instead of computing KVs for all tokens during prefilling, LazyLLM selectively processes only the most important tokens based on attention scores, deferring less important ones to later steps if needed. It uses progressive token pruning across transformer layers and introduces an Aux Cache to store hidden states of pruned tokens. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This approach significantly reduces the time-to-first-token (TTFT) and overall generation time while maintaining accuracy across various tasks. LazyLLM outperforms baseline techniques like random token dropping and static pruning, and can be easily integrated into existing LLMs without fine-tuning, offering a practical solution for accelerating LLM inference, especially in long-context scenarios.', 'raw': 'This approach significantly reduces the time-to-first-token (TTFT) and overall generation time while maintaining accuracy across various tasks. 
LazyLLM outperforms baseline techniques like random token dropping and static pruning, and can be easily integrated into existing LLMs without fine-tuning, offering a practical solution for accelerating LLM inference, especially in long-context scenarios.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'IN SIMPLE TERMS', 'raw': 'IN SIMPLE TERMS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'When you prompt a large language model (LLM), it usually looks at every single word/subword (or tokens) in your prompt before generating a response. This can be time-consuming, especially for prompts with very long texts. This paper introduces a new technique that solves this problem by being more selective. Instead of looking at every word right away, it only focuses on the most important words first. It decides which words are important based on how much attention the model gives them. If it needs other words later, it can go back and look at them then. This approach is like skimming a text for key information before reading it in detail.', 'raw': 'When you prompt a large language model (LLM), it usually looks at every single word/subword (or tokens) in your prompt before generating a response. This can be time-consuming, especially for prompts with very long texts. This paper introduces a new technique that solves this problem by being more selective. Instead of looking at every word right away, it only focuses on the most important words first. It decides which words are important based on how much attention the model gives them. If it needs other words later, it can go back and look at them then. This approach is like skimming a text for key information before reading it in detail.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read More: ', 'raw': 'Read More: '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2407.14057', 'raw': 'https://arxiv.org/pdf/2407.14057'}]","LazyLLM - Unusual Collab (Apple & Meta) Yields Impactful Work + +LLM inference typically consists of two stages: prefilling/tokenizing and decoding. In the prefilling stage, the model processes the entire input prompt, computing and caching key-value (KV) pairs for each token, which can be time-consuming for long prompts. This is followed by the decoding stage, where the model generates tokens sequentially, reusing the cached KVs. + +LazyLLM introduces a dynamic token pruning technique. Instead of computing KVs for all tokens during prefilling, LazyLLM selectively processes only the most important tokens based on attention scores, deferring less important ones to later steps if needed. It uses progressive token pruning across transformer layers and introduces an Aux Cache to store hidden states of pruned tokens. + +This approach significantly reduces the time-to-first-token (TTFT) and overall generation time while maintaining accuracy across various tasks. LazyLLM outperforms baseline techniques like random token dropping and static pruning, and can be easily integrated into existing LLMs without fine-tuning, offering a practical solution for accelerating LLM inference, especially in long-context scenarios. + +IN SIMPLE TERMS +When you prompt a large language model (LLM), it usually looks at every single word/subword (or tokens) in your prompt before generating a response. This can be time-consuming, especially for prompts with very long texts. 
This paper introduces a new technique that solves this problem by being more selective. Instead of looking at every word right away, it only focuses on the most important words first. It decides which words are important based on how much attention the model gives them. If it needs other words later, it can go back and look at them then. This approach is like skimming a text for key information before reading it in detail. + +Read More: https://arxiv.org/pdf/2407.14057","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/3yYj8D7xtL1rgi_KHs-aO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/IryreY8BXH2QmGyvAkE8k.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/zvvrP_VO4qFr3dKngJ81O.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/5_TduEda-J9JIRc0ABbKC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/0WMekl3H9Wnizay2h0k3x.png'}]",[],"[{'reaction': '👍', 'users': ['ZeroWw', 'nicolay-r', 'joelbryan', 'osanseviero'], 'count': 4}]",2024-07-23 01:16:16,2024-07-23 19:22:26.671,"[{'_id': '6646428e923866048f3e13e0', 'avatarUrl': '/avatars/54483699273ac58a4a6fe1fa4aab65fe.svg', 'fullname': 'Robert Sinclair', 'name': 'ZeroWw', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 100, 'isFollowing': False}]",/posts/Jaward/111964148485189,1360,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png,159.0,Richard A Aragon,TuringsSolutions,705751692421448,"[{'type': 'text', 'value': 'SNN Image Diffusion V2', 'raw': 'SNN Image Diffusion V2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Billionaires have been made for less than this. This is only one of the things it can do. It can do API calls, function calls, optimize poker and blackjack odds, anything that is an optimization problem. It costs fractions of a penny and requires fractions of the compute of an LLM model. It can even communicate two ways with an LLM model.', 'raw': 'Billionaires have been made for less than this. This is only one of the things it can do. It can do API calls, function calls, optimize poker and blackjack odds, anything that is an optimization problem. It costs fractions of a penny and requires fractions of the compute of an LLM model. It can even communicate two ways with an LLM model.'}, {'type': 'new_line', 'raw': '\n'}]","SNN Image Diffusion V2 + +Billionaires have been made for less than this. This is only one of the things it can do. It can do API calls, function calls, optimize poker and blackjack odds, anything that is an optimization problem. It costs fractions of a penny and requires fractions of the compute of an LLM model. It can even communicate two ways with an LLM model. 
+ +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64274b69ba6cef0a6ebb0fd6/efHgrIMNS0ICfq7mLdP_T.png'}]",[],"[{'reaction': '👍', 'users': ['zikazach', 'Xdotnet', 'nicolay-r'], 'count': 3}]",2024-07-22 23:46:21,2024-07-23 21:46:14.484,"[{'_id': '6316fb937b0ee0136e5f1220', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6316fb937b0ee0136e5f1220/poHBoJ7QAF_s2CCaosdvQ.jpeg', 'fullname': 'Firstname Lastname', 'name': 'takeraparterer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32, 'isFollowing': False}, {'_id': '64274b69ba6cef0a6ebb0fd6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/cA64Ix1vh75C7HoClUBhx.png', 'fullname': 'Richard A Aragon', 'name': 'TuringsSolutions', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 159, 'isFollowing': False}, {'_id': '635c2f5c3cb827d58118bb01', 'avatarUrl': '/avatars/0087f207c06a793c55ed0489ff793e70.svg', 'fullname': 'nicolo', 'name': 'nicolollo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/TuringsSolutions/705751692421448,1384,,19 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg,1593.0,Clelia Astra Bertelli,as-cle-bert,624836925605809,"[{'type': 'text', 'value': 'Hi HuggingFacers!🤗', 'raw': 'Hi HuggingFacers!🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Good news concerning ', 'raw': 'Good news concerning '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'as-cle-bert/smolLM-arena'}, 'url': 'https://huggingface.co/spaces/as-cle-bert/smolLM-arena', 'raw': 'https://huggingface.co/spaces/as-cle-bert/smolLM-arena'}, {'type': 'text', 'value': ', the chat arena where you can compare some of the Small Language Models (<1.7B) on the Hub and cast your vote to choose the best!📱', 'raw': ', the chat arena where you can compare some of the Small Language Models (<1.7B) on the Hub and cast your vote to choose the best!📱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The space now has a new interface with chatbots instead of text boxes; it runs faster and it also comes with usage instructions :)', 'raw': 'The space now has a new interface with chatbots instead of text boxes; it runs faster and it also comes with usage instructions :)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Have fun!🍕', 'raw': 'Have fun!🍕'}]","Hi HuggingFacers!🤗 + +Good news concerning https://huggingface.co/spaces/as-cle-bert/smolLM-arena, the chat arena where you can compare some of the Small Language Models (<1.7B) on the Hub and cast your vote to choose the best!📱 +The space now has a new interface with chatbots instead of text boxes; it runs faster and it also comes with usage instructions :) +Have fun!🍕",[],[],"[{'reaction': '❤️', 'users': ['prithivMLmods', 'nicolay-r', 'Ramikan-BR', 'cahlen', 'osanseviero', 'quyettv', 'louisbrulenaudet'], 'count': 7}, {'reaction': '🚀', 'users': ['Ramikan-BR', 'Felladrin'], 'count': 2}, {'reaction': '🔥', 'users': ['Ramikan-BR'], 'count': 1}, {'reaction': '👀', 'users': ['Ramikan-BR'], 'count': 1}]",2024-07-22 21:39:50,2024-07-22 21:39:50.217,[],/posts/as-cle-bert/624836925605809,1403,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png,5900.0,Joshua,Xenova,284224143230669,"[{'type': 'text', 'value': 'Introducing Whisper Diarization: Multilingual speech recognition with word-level timestamps and speaker segmentation, running 100% locally in your browser thanks to 🤗 Transformers.js!', 'raw': 'Introducing Whisper Diarization: Multilingual speech recognition with word-level timestamps and speaker segmentation, running 100% locally in your browser thanks to 🤗 Transformers.js!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tested on this iconic Letterman interview w/ Grace Hopper from 1983!', 'raw': 'Tested on this iconic Letterman interview w/ Grace Hopper from 1983!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Demo: ', 'raw': '- Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Xenova/whisper-speaker-diarization'}, 'url': 'https://huggingface.co/spaces/Xenova/whisper-speaker-diarization', 'raw': 'https://huggingface.co/spaces/Xenova/whisper-speaker-diarization'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Source code: ', 'raw': '- Source code: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Xenova/whisper-speaker-diarization'}, 'url': 'https://huggingface.co/spaces/Xenova/whisper-speaker-diarization/tree/main/whisper-speaker-diarization', 'raw': 'https://huggingface.co/spaces/Xenova/whisper-speaker-diarization/tree/main/whisper-speaker-diarization'}]","Introducing Whisper Diarization: Multilingual speech recognition with word-level timestamps and speaker segmentation, running 100% locally in your browser thanks to 🤗 Transformers.js! + +Tested on this iconic Letterman interview w/ Grace Hopper from 1983! 
+ +- Demo: https://huggingface.co/spaces/Xenova/whisper-speaker-diarization +- Source code: https://huggingface.co/spaces/Xenova/whisper-speaker-diarization/tree/main/whisper-speaker-diarization","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61b253b7ac5ecaae3d1efe0c/cLCFwAvCkTp-hM6eY1B4q.mp4'}]",[],"[{'reaction': '👍', 'users': ['freinold', 'ZeroWw', 'John6666', 'DmitryRyumin', 'okamirvs', 'Nymbo', 'ssboost', 'maruyasa', 'Deddy', 'GPT007', 'sudanenator', 'Rsln', 'dave3991', 'bmorphism', 'toshvelaga', 'osanseviero', 'd8rt8v', 'devstockgirl'], 'count': 18}, {'reaction': '🔥', 'users': ['ssboost', 'prithivMLmods', 'Sylvestre', 'Deddy', 'nicolay-r', 'Gatozu35', 'AARon99', 'toshvelaga', 'osanseviero'], 'count': 9}, {'reaction': '❤️', 'users': ['julien-rodriguez', 'DataSoul', 'BoscoTheDog', 'clem', 'toshvelaga', 'osanseviero'], 'count': 6}]",2024-07-22 20:30:29,2024-07-23 20:05:06.948,"[{'_id': '6303cc3e1dd5d3c62483bd51', 'avatarUrl': '/avatars/afbc48df2e8c47c35be48168113d83c0.svg', 'fullname': 's', 'name': 'Tom-Neverwinter', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/Xenova/284224143230669,8152,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,689972259553494,"[{'type': 'text', 'value': '""By the end of this blog post, you will have ', 'raw': '""By the end of this blog post, you will have '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- learnt all the new goodies accompanying the latest macOS release ', 'raw': '- learnt all the new goodies accompanying the latest macOS release '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- AND successfully run a 7B parameter model using less than 4GB of memory on your Mac.""', 'raw': '- AND successfully run a 7B parameter model using less than 4GB of memory on your Mac.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Game-changer for local AI? Can't wait to try this! "", 'raw': ""Game-changer for local AI? Can't wait to try this! ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Brilliant work by ', 'raw': 'Brilliant work by '}, {'type': 'mention', 'user': 'pcuenq', 'raw': '@pcuenq'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'osanseviero', 'raw': '@osanseviero'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'reach-vb', 'raw': '@reach-vb'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'FL33TW00D-HF', 'raw': '@FL33TW00D-HF'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out: ', 'raw': 'Check it out: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/mistral-coreml', 'raw': 'https://huggingface.co/blog/mistral-coreml'}, {'type': 'text', 'value': ' #apple ', 'raw': ' #apple '}]","""By the end of this blog post, you will have +- learnt all the new goodies accompanying the latest macOS release +- AND successfully run a 7B parameter model using less than 4GB of memory on your Mac."" + +Game-changer for local AI? Can't wait to try this! 
+ +Brilliant work by @pcuenq @osanseviero @reach-vb @FL33TW00D-HF + +Check it out: https://huggingface.co/blog/mistral-coreml #apple ","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/NwjC-VUQBapnb50qkopv4.mp4'}]","[{'_id': '6597e9f42235d4056bc6980a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6597e9f42235d4056bc6980a/6N_Eira5Rj5e8ZdgekKPQ.jpeg', 'fullname': 'Christopher Fleetwood', 'name': 'FL33TW00D-HF', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 121}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221}, {'_id': '603d25b75f9d390ab190b777', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1617264212503-603d25b75f9d390ab190b777.jpeg', 'fullname': 'Pedro Cuenca', 'name': 'pcuenq', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1132}, {'_id': '61b85ce86eb1f2c5e6233736', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1655385361868-61b85ce86eb1f2c5e6233736.jpeg', 'fullname': 'Vaibhav Srivastav', 'name': 'reach-vb', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 930}]","[{'reaction': '🔥', 'users': ['enzolib', 'FL33TW00D-HF', 'szymonrucinski'], 'count': 3}]",2024-07-22 18:09:25,2024-07-22 21:03:17.173,"[{'_id': '6646428e923866048f3e13e0', 'avatarUrl': '/avatars/54483699273ac58a4a6fe1fa4aab65fe.svg', 'fullname': 'Robert Sinclair', 'name': 'ZeroWw', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 100, 'isFollowing': False}]",/posts/fdaudens/689972259553494,659,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/600ae38cc92b79f54efd4556/cSqRIslYl5L3I4WK3a31f.png,83.0,Hieu Lam,lamhieu,421245113676574,"[{'type': 'text', 'value': '🎉 Ghost 8B Beta Released: Game-Changing Language Model', 'raw': '🎉 Ghost 8B Beta Released: Game-Changing Language Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '--', 'raw': '--'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Ghost 8B Beta is a groundbreaking language model developed with a clear vision: to deliver exceptional multilingual support, superior knowledge capabilities, and all while remaining cost-effective. This model comes in two context length variations, 8k and 128k, ensuring flexibility for various tasks. 
Moreover, it boasts built-in multilingual functionality, making it a powerful tool for global communication and understanding.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '--', 'raw': '--'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* See detailed article: ', 'raw': '* See detailed article: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/lamhieu/ghost-8b-beta-released-game-changing-language-mode', 'raw': 'https://huggingface.co/blog/lamhieu/ghost-8b-beta-released-game-changing-language-mode'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Model card: ', 'raw': '* Model card: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ghost-x/ghost-8b-beta'}, 'url': 'https://huggingface.co/ghost-x/ghost-8b-beta', 'raw': 'https://huggingface.co/ghost-x/ghost-8b-beta'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Official website: ', 'raw': '* Official website: '}, {'type': 'link', 'href': 'https://ghost-x.org/docs/models/ghost-8b-beta', 'raw': 'https://ghost-x.org/docs/models/ghost-8b-beta'}]","🎉 Ghost 8B Beta Released: Game-Changing Language Model +-- +Ghost 8B Beta is a groundbreaking language model developed with a clear vision: to deliver exceptional multilingual support, superior knowledge capabilities, and all while remaining cost-effective. This model comes in two context length variations, 8k and 128k, ensuring flexibility for various tasks. Moreover, it boasts built-in multilingual functionality, making it a powerful tool for global communication and understanding. +-- +* See detailed article: https://huggingface.co/blog/lamhieu/ghost-8b-beta-released-game-changing-language-mode +* Model card: https://huggingface.co/ghost-x/ghost-8b-beta +* Official website: https://ghost-x.org/docs/models/ghost-8b-beta",[],[],"[{'reaction': '❤️', 'users': ['ZeroWw', 'danielus', 'nicolay-r', 'ecyht2', 'John6666', 'MDalprato', 'louisbrulenaudet'], 'count': 7}, {'reaction': '🤯', 'users': ['stefan-it'], 'count': 1}]",2024-07-22 17:44:14,2024-07-22 17:44:14.126,[],/posts/lamhieu/421245113676574,2110,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4VOzArmrRaX_DUTxGmm59.jpeg,59.0,Charles McSneed,ChuckMcSneed,797420456175789,"[{'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/Azure/azureml-assets/pull/3180/files', 'raw': 'https://github.com/Azure/azureml-assets/pull/3180/files'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLAMA-3.1 benches', 'raw': 'LLAMA-3.1 benches'}]"," +https://github.com/Azure/azureml-assets/pull/3180/files +LLAMA-3.1 benches","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65644e982bdaccfcd536aff1/eWfOOX5Ljs8NgWwpEmdOp.png'}]",[],"[{'reaction': '😔', 'users': ['ZeroWw'], 'count': 1}]",2024-07-22 17:08:09,2024-07-26 10:49:35.773,"[{'_id': '6646428e923866048f3e13e0', 'avatarUrl': '/avatars/54483699273ac58a4a6fe1fa4aab65fe.svg', 'fullname': 'Robert Sinclair', 'name': 'ZeroWw', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 100, 'isFollowing': False}, {'_id': '660432d2d2e59abb3fd40b8c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/660432d2d2e59abb3fd40b8c/TrvNnR8wHDh9lPHm81JfQ.png', 'fullname': 'David Meriwether', 'name': 'BigHuggyD', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 
'followerCount': 31, 'isFollowing': False}, {'_id': '65995c45539c808e84c38bf1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65995c45539c808e84c38bf1/k0y3ULloWQEMvosQwHgrE.png', 'fullname': 'Juk Armstrong', 'name': 'jukofyork', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 110, 'isFollowing': False}, {'_id': '65644e982bdaccfcd536aff1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/4VOzArmrRaX_DUTxGmm59.jpeg', 'fullname': 'Charles McSneed', 'name': 'ChuckMcSneed', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 59, 'isFollowing': False}]",/posts/ChuckMcSneed/797420456175789,803,,26 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,687758478477767,"[{'type': 'text', 'value': 'Small models, BIG impact: SmolLM is here! 🚀🔬', 'raw': 'Small models, BIG impact: SmolLM is here! 🚀🔬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're launching a series of small but mighty language models:"", 'raw': ""We're launching a series of small but mighty language models:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏎️ Super fast - runs on laptops, phones, you name it!', 'raw': '🏎️ Super fast - runs on laptops, phones, you name it!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📏 3 sizes: 135M, 360M, and 1.7B parameters', 'raw': '📏 3 sizes: 135M, 360M, and 1.7B parameters'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥇 Outperforms same size models from Meta, Microsoft, and Qwen', 'raw': '🥇 Outperforms same size models from Meta, Microsoft, and Qwen'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔓 Fully open-source: datasets, training code, models', 'raw': '🔓 Fully open-source: datasets, training code, models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐊𝐞𝐲 𝐟𝐞𝐚𝐭𝐮𝐫𝐞𝐬', 'raw': '𝐊𝐞𝐲 𝐟𝐞𝐚𝐭𝐮𝐫𝐞𝐬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Trained on FineWeb-Edu and Cosmopedia v2 (largest synthetic pre-training dataset)', 'raw': '- Trained on FineWeb-Edu and Cosmopedia v2 (largest synthetic pre-training dataset)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- No cloud needed - run locally for privacy and energy efficiency', 'raw': '- No cloud needed - run locally for privacy and energy efficiency'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Everything is public, from data curation to training steps', 'raw': '- Everything is public, from data curation to training steps'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐏𝐨𝐭𝐞𝐧𝐭𝐢𝐚𝐥 𝐮𝐬𝐞 𝐜𝐚𝐬𝐞𝐬', 'raw': '𝐏𝐨𝐭𝐞𝐧𝐭𝐢𝐚𝐥 𝐮𝐬𝐞 𝐜𝐚𝐬𝐞𝐬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- On-device autocomplete', 'raw': '- On-device autocomplete'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Local request parsing', 'raw': '- Local request parsing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Custom fine-tuning for specific needs without the need for expensive GPUs', 'raw': '- Custom fine-tuning for specific needs without the need for expensive GPUs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐆𝐨 𝐝𝐞𝐞𝐩𝐞𝐫', 'raw': '𝐆𝐨 𝐝𝐞𝐞𝐩𝐞𝐫'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Check it out: ', 'raw': '👉 Check it out: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/HuggingFaceTB/smollm-models-6695016cad7167254ce15966', 'raw': 'https://huggingface.co/collections/HuggingFaceTB/smollm-models-6695016cad7167254ce15966'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Run the 360M model in your browser, 100% private: ', 'raw': '👉 Run the 360M model in your browser, 100% private: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'HuggingFaceTB/SmolLM-360M-Instruct-WebGPU'}, 'url': 'https://huggingface.co/spaces/HuggingFaceTB/SmolLM-360M-Instruct-WebGPU', 'raw': 'https://huggingface.co/spaces/HuggingFaceTB/SmolLM-360M-Instruct-WebGPU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Read the blog explaining everything in detail: huggingface.co/blog/smollm', 'raw': '👉 Read the blog explaining everything in detail: huggingface.co/blog/smollm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kudos to the stellar team who worked on this project: ', 'raw': 'Kudos to the stellar team who worked on this project: '}, {'type': 'mention', 'user': 'loubnabnl', 'raw': '@loubnabnl'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'anton-l', 'raw': '@anton-l'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'eliebak', 'raw': '@eliebak'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'lvwerra', 'raw': '@lvwerra'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Small models, BIG impact: SmolLM is here! 🚀🔬 + +We're launching a series of small but mighty language models: +🏎️ Super fast - runs on laptops, phones, you name it! 
+ +📏 3 sizes: 135M, 360M, and 1.7B parameters +🥇 Outperforms same size models from Meta, Microsoft, and Qwen +🔓 Fully open-source: datasets, training code, models + +𝐊𝐞𝐲 𝐟𝐞𝐚𝐭𝐮𝐫𝐞𝐬 +- Trained on FineWeb-Edu and Cosmopedia v2 (largest synthetic pre-training dataset) +- No cloud needed - run locally for privacy and energy efficiency +- Everything is public, from data curation to training steps + +𝐏𝐨𝐭𝐞𝐧𝐭𝐢𝐚𝐥 𝐮𝐬𝐞 𝐜𝐚𝐬𝐞𝐬 +- On-device autocomplete +- Local request parsing +- Custom fine-tuning for specific needs without the need for expensive GPUs + +𝐆𝐨 𝐝𝐞𝐞𝐩𝐞𝐫 +👉 Check it out: https://huggingface.co/collections/HuggingFaceTB/smollm-models-6695016cad7167254ce15966 +👉 Run the 360M model in your browser, 100% private: https://huggingface.co/spaces/HuggingFaceTB/SmolLM-360M-Instruct-WebGPU +👉 Read the blog explaining everything in detail: huggingface.co/blog/smollm + +Kudos to the stellar team who worked on this project: @loubnabnl @anton-l @eliebak @lvwerra ",[],"[{'_id': '602e6dee60e3dd96631c906e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1613655355830-noauth.png', 'fullname': 'Anton Lozhkov', 'name': 'anton-l', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 209}, {'_id': '651e96991b97c9f33d26bde6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/651e96991b97c9f33d26bde6/-Bqs6qrmz0yCfwtB2e-6q.jpeg', 'fullname': 'Elie Bakouch', 'name': 'eliebak', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 179}, {'_id': '61c141342aac764ce1654e43', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61c141342aac764ce1654e43/81AwoT5IQ_Xdw0OVw7TKu.jpeg', 'fullname': 'Loubna Ben Allal', 'name': 'loubnabnl', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3482}, {'_id': '5e48005437cb5b49818287a5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e48005437cb5b49818287a5/4uCXGGui-9QifAT4qelxU.png', 'fullname': 'Leandro von Werra', 'name': 'lvwerra', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 438}]","[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'sgarbi', 'sivan22', 'mdouglas', 'nroggendorff', 'John6666', 'reach-vb', 'plaguss', 'gabrielmbmb', 'osanseviero', 'jeffboudier', 'AyoubChLin', 'holooo', 'raveninrhythm', 'Ramikan-BR', 'clem', 'yjernite', 'louisbrulenaudet'], 'count': 18}, {'reaction': '🤝', 'users': ['surfhb', 'reach-vb', 'osanseviero', 'FaultyEntry', 'ucsahin', 'raveninrhythm', 'Ramikan-BR', 'clem', 'yjernite'], 'count': 9}, {'reaction': '🚀', 'users': ['Ramikan-BR', 'clem', 'yjernite'], 'count': 3}, {'reaction': '❤️', 'users': ['Ramikan-BR', 'clem', 'yjernite'], 'count': 3}, {'reaction': '👀', 'users': ['Ramikan-BR', 'clem'], 'count': 2}, {'reaction': '🧠', 'users': ['Ramikan-BR', 'clem'], 'count': 2}]",2024-07-16 17:39:37,2024-07-16 17:39:37.821,[],/posts/fdaudens/687758478477767,3329,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6340651b388c3fa40f9a5bc0/av1C4_S7bHGxAzOu8lOmG.jpeg,427.0,Adam Molnar,lunarflu,618460361384960,"[{'type': 'text', 'value': 'Cool things this week from ', 'raw': 'Cool things this week from '}, {'type': 'mention', 'user': 'huggingface', 'raw': '@huggingface'}, {'type': 'text', 'value': '!', 'raw': '!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌎AI math olympiad winner NuminaMath is 
here!', 'raw': '🌎AI math olympiad winner NuminaMath is here!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗Announcing New Hugging Face and Keras NLP integration', 'raw': '🤗Announcing New Hugging Face and Keras NLP integration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨UI overhaul to HF tokens! ', 'raw': '✨UI overhaul to HF tokens! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧊 Embed our dataset viewer on any webpage!', 'raw': '🧊 Embed our dataset viewer on any webpage!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/winning-aimo-progress-prize', 'raw': 'https://huggingface.co/blog/winning-aimo-progress-prize'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/keras-nlp-integration', 'raw': 'https://huggingface.co/blog/keras-nlp-integration'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/settings/tokens', 'raw': 'https://huggingface.co/settings/tokens'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://x.com/julien_c/status/1812099420726456457', 'raw': 'https://x.com/julien_c/status/1812099420726456457'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the full list on our Discord! 👇', 'raw': 'Check out the full list on our Discord! 👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://discord.com/invite/JfAtkvEtRb', 'raw': 'https://discord.com/invite/JfAtkvEtRb'}, {'type': 'new_line', 'raw': '\n'}]","Cool things this week from @huggingface! + +🌎AI math olympiad winner NuminaMath is here! +🤗Announcing New Hugging Face and Keras NLP integration +✨UI overhaul to HF tokens! +🧊 Embed our dataset viewer on any webpage! + +https://huggingface.co/blog/winning-aimo-progress-prize +https://huggingface.co/blog/keras-nlp-integration +https://huggingface.co/settings/tokens +https://x.com/julien_c/status/1812099420726456457 + +Check out the full list on our Discord! 👇 +https://discord.com/invite/JfAtkvEtRb +",[],[],"[{'reaction': '🔥', 'users': ['reach-vb', 'plaguss', 'osanseviero', 'jeffboudier', 'not-lain', 'yjernite', 'louisbrulenaudet', 'Blane187', 'apehex'], 'count': 9}]",2024-07-16 15:22:26,2024-07-16 15:22:26.679,[],/posts/lunarflu/618460361384960,1972,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1663599585288-noauth.png,36.0,Luciano Santa Brígida,lucianosb,799720029968177,"[{'type': 'text', 'value': 'The community Journalists on HuggingFace recently launched a tool (', 'raw': 'The community Journalists on HuggingFace recently launched a tool ('}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'JournalistsonHF/text-to-image-bias'}, 'url': 'https://huggingface.co/spaces/JournalistsonHF/text-to-image-bias', 'raw': 'https://huggingface.co/spaces/JournalistsonHF/text-to-image-bias'}, {'type': 'text', 'value': ') to compare biases across several text-to-image models. I forked my own copy to evaluate the SDXL models I made.', 'raw': ') to compare biases across several text-to-image models. 
I forked my own copy to evaluate the SDXL models I made.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## SinteticoXL Bias:', 'raw': '## SinteticoXL Bias:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'lucianosb/sinteticoXL-bias'}, 'url': 'https://huggingface.co/spaces/lucianosb/sinteticoXL-bias', 'raw': 'https://huggingface.co/spaces/lucianosb/sinteticoXL-bias'}]","The community Journalists on HuggingFace recently launched a tool (https://huggingface.co/spaces/JournalistsonHF/text-to-image-bias) to compare biases across several text-to-image models. I forked my own copy to evaluate the SDXL models I made. + +## SinteticoXL Bias: + +https://huggingface.co/spaces/lucianosb/sinteticoXL-bias","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/632883edb0910efc277f0f6b/HdhBloHrMJpPxowyQS7Uw.png'}]",[],"[{'reaction': '🔥', 'users': ['fdaudens', 'evijit', 'reach-vb', 'yjernite'], 'count': 4}]",2024-07-16 13:21:01,2024-07-16 17:28:56.217,"[{'_id': '647f36a8454af0237bd49574', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg', 'fullname': 'Florent Daudens', 'name': 'fdaudens', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 759, 'isFollowing': False}]",/posts/lucianosb/799720029968177,1679,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,737813976382134,"[{'type': 'text', 'value': '🔴⭐ New addition to the existing concept space! 🔴⭐', 'raw': '🔴⭐ New addition to the existing concept space! 🔴⭐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏞️ Space: ', 'raw': '🏞️ Space: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K', 'raw': 'https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Tried the Duotone Canvas with the image generator. Unlike the duotone filter in the Canva app, which applies hue and tints in RGBA, this feature applies duotones based purely on the provided prompt to personalize the generated image.', 'raw': '🚀 Tried the Duotone Canvas with the image generator. Unlike the duotone filter in the Canva app, which applies hue and tints in RGBA, this feature applies duotones based purely on the provided prompt to personalize the generated image.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 These tones also work with the gridding option, which already exists in the space.', 'raw': '🚀 These tones also work with the gridding option, which already exists in the space.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 The application of tones depends on the quality and detail of the prompt given. 
The palette may be distorted in some cases.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🚀 It doesn't apply like a hue or tint in RGBA (as shown in the Canva app below); it is purely based on the prompts passed."", 'raw': ""🚀 It doesn't apply like a hue or tint in RGBA (as shown in the Canva app below); it is purely based on the prompts passed.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏞️ Check out the space: ', 'raw': '🏞️ Check out the space: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K', 'raw': 'https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏜️Collection: ', 'raw': '🏜️Collection: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/prithivMLmods/collection-zero-65e48a7dd8212873836ceca2', 'raw': 'https://huggingface.co/collections/prithivMLmods/collection-zero-65e48a7dd8212873836ceca2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'huggingface.co/spaces/prithivMLmods/IMAGINEO-4K', 'raw': '```\nhuggingface.co/spaces/prithivMLmods/IMAGINEO-4K\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏞️What you can do with this space:', 'raw': '🏞️What you can do with this space:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Compose Image Grid', 'raw': '✅ Compose Image Grid'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉🏻 ""2x1"", ""1x2"", ""2x2"", ""2x3"", ""3x2"", ""1x1""', 'raw': '👉🏻 ""2x1"", ""1x2"", ""2x2"", ""2x3"", ""3x2"", ""1x1""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Apply styles', 'raw': '✅ Apply styles'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Set up Image tones', 'raw': '✅ Set up Image tones'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Apply filters & adjust quality', 'raw': '✅ Apply filters & adjust quality'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks for reading!', 'raw': 'Thanks for reading!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'mention', 'user': 'prithivMLmods', 'raw': '@prithivMLmods'}]","🔴⭐ New addition to the existing concept space! 🔴⭐ + +🏞️ Space: https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K + +🚀 Tried the Duotone Canvas with the image generator. Unlike the duotone filter in the Canva app, which applies hue and tints in RGBA, this feature applies duotones based purely on the provided prompt to personalize the generated image. + +🚀 These tones also work with the gridding option, which already exists in the space. + +🚀 The application of tones depends on the quality and detail of the prompt given. The palette may be distorted in some cases. + +🚀 It doesn't apply like a hue or tint in RGBA (as shown in the Canva app below); it is purely based on the prompts passed. 
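🚀 If you'd rather drive the space from code, here is a rough `gradio_client` sketch. Only `Client` and `view_api()` are known calls; the commented-out `predict` arguments and endpoint name are hypothetical placeholders, so check what `view_api()` reports before calling:

```python
from gradio_client import Client

client = Client('prithivMLmods/IMAGINEO-4K')
client.view_api()  # prints the space's actual endpoints and parameters

# Hypothetical call shape only; swap in the names view_api() reports:
# result = client.predict(
#     'duotone cyan and magenta, city skyline, cinematic',  # prompt
#     '2x2',                                                # grid option
#     api_name='/run',
# )
```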
+ +🏞️ Check out the space: https://huggingface.co/spaces/prithivMLmods/IMAGINEO-4K +🏜️Collection: https://huggingface.co/collections/prithivMLmods/collection-zero-65e48a7dd8212873836ceca2 + +``` +huggingface.co/spaces/prithivMLmods/IMAGINEO-4K +``` +🏞️What you can do with this space: +✅ Compose Image Grid +👉🏻 ""2x1"", ""1x2"", ""2x2"", ""2x3"", ""3x2"", ""1x1"" +✅ Apply styles +✅ Set up Image tones +✅ Apply filters & adjust quality + +. +. +. +Thanks for reading! +- @prithivMLmods","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/ILU305r70lSdmAk84q2Qx.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/RuEfpRplU32zeoqMwwC85.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/6ci2TXUcoFaZHM6YxT0OT.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/jXfyntaOBIYAlypt93Efh.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/FQAOWkicaE-s--kuAk4gy.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65bb837dbfb878f46c77de4c/ACZgwTbmQ_T79YYLizb1T.png'}]","[{'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957}]","[{'reaction': '❤️', 'users': ['reach-vb', 'KingNish', 'GPT007', 'prithivMLmods', 'NanyTVZ19'], 'count': 5}, {'reaction': '👍', 'users': ['BishalRD', 'prithivMLmods', 'reach-vb'], 'count': 3}, {'reaction': '🚀', 'users': ['prithivMLmods', 'reach-vb'], 'count': 2}, {'reaction': '🔥', 'users': ['prithivMLmods'], 'count': 1}, {'reaction': '➕', 'users': ['prithivMLmods'], 'count': 1}]",2024-07-15 23:47:45,2024-08-06 09:59:54.161,"[{'_id': '665832994f776d66a2a546ac', 'avatarUrl': '/avatars/43b282d651d6b903bc2756e48034ee7a.svg', 'fullname': 'Samurai', 'name': 'The-Last-Samurai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/prithivMLmods/737813976382134,3440,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,211717085209386,"[{'type': 'text', 'value': '🚀🕺🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟💃🚀', 'raw': '🚀🕺🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 
🌟💃🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: IntrinsicAvatar: Physically Based Inverse Rendering of Dynamic Humans from Monocular Videos via Explicit Ray Tracing 🔝', 'raw': '📄 Title: IntrinsicAvatar: Physically Based Inverse Rendering of Dynamic Humans from Monocular Videos via Explicit Ray Tracing 🔝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: IntrinsicAvatar is a method for extracting high-quality geometry, albedo, material, and lighting properties of clothed human avatars from monocular videos using explicit ray tracing and volumetric scattering, enabling realistic animations under varying lighting conditions.', 'raw': '📝 Description: IntrinsicAvatar is a method for extracting high-quality geometry, albedo, material, and lighting properties of clothed human avatars from monocular videos using explicit ray tracing and volumetric scattering, enabling realistic animations under varying lighting conditions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Shaofei Wang, Božidar Antić, Andreas Geiger, and Siyu Tang', 'raw': '👥 Authors: Shaofei Wang, Božidar Antić, Andreas Geiger, and Siyu Tang'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸', 'raw': '📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2312.05210'}, 'url': 'https://huggingface.co/papers/2312.05210', 'raw': 'https://huggingface.co/papers/2312.05210', 'label': 'IntrinsicAvatar: Physically Based Inverse Rendering of Dynamic Humans\n from Monocular Videos via Explicit Ray Tracing (2312.05210)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Page: ', 'raw': '🌐 Github Page: '}, {'type': 'link', 'href': 'https://neuralbodies.github.io/IntrinsicAvatar/', 'raw': 'https://neuralbodies.github.io/IntrinsicAvatar/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Repository: ', 'raw': '📁 Repository: '}, {'type': 'link', 'href': 'https://github.com/taconite/IntrinsicAvatar', 'raw': 'https://github.com/taconite/IntrinsicAvatar'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📺 Video: ', 'raw': '📺 Video: '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=aS8AIxgVXzI', 'raw': 'https://www.youtube.com/watch?v=aS8AIxgVXzI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 CVPR-2023-24-Papers: ', 'raw': '🚀 CVPR-2023-24-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/CVPR-2023-24-Papers', 'raw': 'https://github.com/DmitryRyumin/CVPR-2023-24-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 WACV-2024-Papers: ', 'raw': '🚀 WACV-2024-Papers: '}, {'type': 'link', 'href': 'https://github.com/DmitryRyumin/WACV-2024-Papers', 'raw': 'https://github.com/DmitryRyumin/WACV-2024-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 ICCV-2023-Papers: ', 'raw': '🚀 ICCV-2023-Papers: '}, {'type': 'link', 'href': 
'https://github.com/DmitryRyumin/ICCV-2023-Papers', 'raw': 'https://github.com/DmitryRyumin/ICCV-2023-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #IntrinsicAvatar #InverseRendering #MonocularVideos #RayTracing #VolumetricScattering #3DReconstruction #MachineLearning #ComputerVision #DeepLearning #AI #CVPR2024', 'raw': '🔍 Keywords: #IntrinsicAvatar #InverseRendering #MonocularVideos #RayTracing #VolumetricScattering #3DReconstruction #MachineLearning #ComputerVision #DeepLearning #AI #CVPR2024'}]","🚀🕺🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟💃🚀 +📄 Title: IntrinsicAvatar: Physically Based Inverse Rendering of Dynamic Humans from Monocular Videos via Explicit Ray Tracing 🔝 + +📝 Description: IntrinsicAvatar is a method for extracting high-quality geometry, albedo, material, and lighting properties of clothed human avatars from monocular videos using explicit ray tracing and volumetric scattering, enabling realistic animations under varying lighting conditions. 
+ +👥 Authors: Shaofei Wang, Božidar Antić, Andreas Geiger, and Siyu Tang + +📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸 + +🔗 Paper: https://huggingface.co/papers/2312.05210 + +🌐 Github Page: https://neuralbodies.github.io/IntrinsicAvatar/ +📁 Repository: https://github.com/taconite/IntrinsicAvatar + +📺 Video: https://www.youtube.com/watch?v=aS8AIxgVXzI + +🚀 CVPR-2023-24-Papers: https://github.com/DmitryRyumin/CVPR-2023-24-Papers + +🚀 WACV-2024-Papers: https://github.com/DmitryRyumin/WACV-2024-Papers + +🚀 ICCV-2023-Papers: https://github.com/DmitryRyumin/ICCV-2023-Papers + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +🔍 Keywords: #IntrinsicAvatar #InverseRendering #MonocularVideos #RayTracing #VolumetricScattering #3DReconstruction #MachineLearning #ComputerVision #DeepLearning #AI #CVPR2024","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/IxR-zXeMuYqdLLSDlBlOZ.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/DFuFo92ZBwiDfR_6RtaON.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/L5D8A7kdMEOb4EEGuCStw.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/kZRysC8kE0T-iJOMj8jN8.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/upYBwSFUU8RKqitfDRBNj.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/OqAeCyNw3T6Fs4pPyKewX.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/b4bIuLCwlQjV0vIwbAkXA.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/qBw-6Gn76DQCSYXANegxp.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/i9KDmm5FkKuuT6e8nyY4v.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/1Iye3JkI2dtpjnINTK39c.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/nlbaHUt8d5h-U6bDW-yw6.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}]","[{'reaction': '👍', 'users': ['DmitryRyumin', 'prithivMLmods', 'ClayFace', 'Ramikan-BR', 'reach-vb', 'Alexandro14'], 'count': 6}]",2024-07-15 18:58:21,2024-07-15 18:58:21.225,[],/posts/DmitryRyumin/211717085209386,2303,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg,2185.0,Hafedh Hichri,not-lain,115484245251879,"[{'type': 'text', 'value': 'I am now a huggingface fellow 🥳', 'raw': 'I am now a huggingface fellow 🥳'}]",I am now a huggingface fellow 🥳,"[{'type': 'image', 'url': 
'https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/bnAt54rxoMJ2rbqaEjxZZ.png'}]",[],"[{'reaction': '🤗', 'users': ['mrfakename', 'MandyGreatness', 'louisbrulenaudet', 'Tonic', 'kramp', 'osanseviero', 's3nh', 'GPT007', 'enzostvs', 'SeanCLucas', 'd0rj', 'hayden-donnelly', 'adamelliotfields', 'Taylor658', 'gabrielmbmb', 'RoboApocalypse', 'reach-vb', 'jeffboudier', 'Blane187', 'Ramikan-BR', 'HDiffusion', 'zhlh', 'ThatOneCoder', 'Hev832', 'Haleshot', 'mmhamdy', 'Fredithefish', 'nroggendorff', 'rmuthiah13', 'mvandermeulen', 'createtheimaginable'], 'count': 31}, {'reaction': '🤝', 'users': ['prithivMLmods', 'Tonic', 'BiniyamAjaw', 'osanseviero', 's3nh', 'reach-vb', 'jeffboudier', 'Ramikan-BR', 'Fredithefish'], 'count': 9}, {'reaction': '👍', 'users': ['ijohn07', 'den0620', 'reach-vb', 'dashfunnydashdash', 'Ramikan-BR', 'ThatOneCoder', 'Tonic'], 'count': 7}, {'reaction': '🔥', 'users': ['mmhamdy', 'Tonic'], 'count': 2}]",2024-07-15 18:07:05,2024-09-04 12:52:11.797,"[{'_id': '659f000b83abded48e190901', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png', 'fullname': 'Noa Roggendorff', 'name': 'nroggendorff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 227, 'isFollowing': False}, {'_id': '62e54f0eae9d3f10acb95cb9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62e54f0eae9d3f10acb95cb9/VAyk05hqB3OZWXEZW-B0q.png', 'fullname': 'mrfakename', 'name': 'mrfakename', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2157, 'isFollowing': False}, {'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}, {'_id': '65e2f1cb4dbf9514fb475b48', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65e2f1cb4dbf9514fb475b48/0EwhfSfMCy8P2e7nJWaOO.jpeg', 'fullname': 'Rico Ardiansyah', 'name': 'Blane187', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 41, 'isFollowing': False}, {'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}, {'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}, {'_id': '66c75fe82c2207bb1732c672', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66c75fe82c2207bb1732c672/X_a8y4ZrSAQEylKpERMFL.jpeg', 'fullname': 'Scott Cawthon', 'name': 'Opa-Opa', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '653a75b19430762a5ca8674b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/653a75b19430762a5ca8674b/mny08nh70r9OSwJJsSOci.jpeg', 'fullname': 'Jaquavious 
Finkleton', 'name': 'ThatOneCoder', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7, 'isFollowing': False}]",/posts/not-lain/115484245251879,7805,,15 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,778836619881171,"[{'type': 'text', 'value': 'Exciting news for audio AI enthusiasts! 🎙️🌍', 'raw': 'Exciting news for audio AI enthusiasts! 🎙️🌍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The Emilia dataset dropped last week, and it's a cool one:"", 'raw': ""The Emilia dataset dropped last week, and it's a cool one:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 101k+ hours of high-quality audio', 'raw': '- 101k+ hours of high-quality audio'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- 6 languages: 🇨🇳 🇺🇸 🇯🇵 🇰🇷 🇩🇪 🇫🇷', 'raw': '- 6 languages: 🇨🇳 🇺🇸 🇯🇵 🇰🇷 🇩🇪 🇫🇷'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Diverse content: talk shows, interviews, debates, sports commentary, audiobooks', 'raw': '- Diverse content: talk shows, interviews, debates, sports commentary, audiobooks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This dataset could improve multilingual speech generation and recognition. Opens up many possibilities for global media, language learning, and accessibility!', 'raw': 'This dataset could improve multilingual speech generation and recognition. Opens up many possibilities for global media, language learning, and accessibility!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explore it: ', 'raw': 'Explore it: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'amphion/Emilia'}, 'url': 'https://huggingface.co/datasets/amphion/Emilia', 'raw': 'https://huggingface.co/datasets/amphion/Emilia'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AIAudio', 'raw': '#AIAudio'}]","Exciting news for audio AI enthusiasts! 🎙️🌍 + +The Emilia dataset dropped last week, and it's a cool one: +- 101k+ hours of high-quality audio +- 6 languages: 🇨🇳 🇺🇸 🇯🇵 🇰🇷 🇩🇪 🇫🇷 +- Diverse content: talk shows, interviews, debates, sports commentary, audiobooks + +This dataset could improve multilingual speech generation and recognition. Opens up many possibilities for global media, language learning, and accessibility! 
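Want a quick look without downloading 100k+ hours of audio? A minimal streaming sketch (the split name and record fields here are assumptions, so check the dataset card first):

```python
from datasets import load_dataset

# Stream the corpus instead of downloading it up front.
ds = load_dataset('amphion/Emilia', split='train', streaming=True)

for sample in ds.take(3):
    # Field names (audio, language, text, ...) are guesses; inspect the keys.
    print(sample.keys())
```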
+ +Explore it: https://huggingface.co/datasets/amphion/Emilia + +#AIAudio","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/Eq1wjflgiNHli6bJqcxkv.png'}]",[],"[{'reaction': '🚀', 'users': ['YaTharThShaRma999', 'ajibawa-2023', 'maywell', 'osanseviero', 'Dihelson', 'John6666', 'reach-vb', 'louisbrulenaudet', 'ngphuchoangnam'], 'count': 9}, {'reaction': '❤️', 'users': ['Dihelson', 'reach-vb', 'HarryHe', 'ngphuchoangnam'], 'count': 4}, {'reaction': '👀', 'users': ['Dihelson', 'HyperBlaze', 'reach-vb', 'ngphuchoangnam'], 'count': 4}, {'reaction': '😔', 'users': ['skcandx'], 'count': 1}]",2024-07-15 15:50:56,2024-07-15 15:50:56.673,[],/posts/fdaudens/778836619881171,2690,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/627b9f3f4d0858f0034efbb9/2Qnattrzv6qvqiZVVfV5x.png,4353.0,WizardLM,WizardLM,574698793995338,"[{'type': 'text', 'value': '🔥 🔥🔥', 'raw': '🔥 🔥🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Excited to announce WizardLM new Paper: Auto Evol-Instruct!', 'raw': 'Excited to announce WizardLM new Paper: Auto Evol-Instruct!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐦 Twitter: ', 'raw': '🐦 Twitter: '}, {'type': 'link', 'href': 'https://x.com/WizardLM_AI/status/1812857977122202087', 'raw': 'https://x.com/WizardLM_AI/status/1812857977122202087'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📃 Paper: ', 'raw': '📃 Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2406.00770', 'raw': 'https://arxiv.org/pdf/2406.00770'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 1. Fully AI-Powered Pipeline', 'raw': '🤖 1. Fully AI-Powered Pipeline'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Auto Evol-Instruct automatically involves an iterative process of optimizing an Evol-Instruct V1 into an optimal one. The pipeline consists of two critical stages: Evol Trajectory Analysis, where the optimizer LLM analyzes the issues and failures exposed in instruction evolution performed by the evol LLM, and Evolving Method Optimization, where the optimizer LLM addresses these issues to progressively develop an effective evolving method. The optimal evolving method is then used to convert the entire instruction dataset into more diverse and complex forms, facilitating improved instruction tuning.', 'raw': 'Auto Evol-Instruct automatically involves an iterative process of optimizing an Evol-Instruct V1 into an optimal one. The pipeline consists of two critical stages: Evol Trajectory Analysis, where the optimizer LLM analyzes the issues and failures exposed in instruction evolution performed by the evol LLM, and Evolving Method Optimization, where the optimizer LLM addresses these issues to progressively develop an effective evolving method. The optimal evolving method is then used to convert the entire instruction dataset into more diverse and complex forms, facilitating improved instruction tuning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📈2. Scaling Evol-Instruct with Arena Learning', 'raw': '📈2. 
Scaling Evol-Instruct with Arena Learning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With Auto Evol-Instruct, the evolutionary synthesis data of WizardLM-2 has scaled up from WizardLM-1 to dozens of domains, covering tasks in all aspects of large language models. This allows Arena Learning to train and learn from an almost infinite pool of high-difficulty instruction data, fully unlocking all the potential of Arena Learning.', 'raw': 'With Auto Evol-Instruct, the evolutionary synthesis data of WizardLM-2 has scaled up from WizardLM-1 to dozens of domains, covering tasks in all aspects of large language models. This allows Arena Learning to train and learn from an almost infinite pool of high-difficulty instruction data, fully unlocking all the potential of Arena Learning.'}]","🔥 🔥🔥 +Excited to announce WizardLM new Paper: Auto Evol-Instruct! + +🐦 Twitter: https://x.com/WizardLM_AI/status/1812857977122202087 + +📃 Paper: https://arxiv.org/pdf/2406.00770 + +🤖 1. Fully AI-Powered Pipeline + +Auto Evol-Instruct automatically involves an iterative process of optimizing an Evol-Instruct V1 into an optimal one. The pipeline consists of two critical stages: Evol Trajectory Analysis, where the optimizer LLM analyzes the issues and failures exposed in instruction evolution performed by the evol LLM, and Evolving Method Optimization, where the optimizer LLM addresses these issues to progressively develop an effective evolving method. The optimal evolving method is then used to convert the entire instruction dataset into more diverse and complex forms, facilitating improved instruction tuning. + +📈2. Scaling Evol-Instruct with Arena Learning + +With Auto Evol-Instruct, the evolutionary synthesis data of WizardLM-2 has scaled up from WizardLM-1 to dozens of domains, covering tasks in all aspects of large language models. This allows Arena Learning to train and learn from an almost infinite pool of high-difficulty instruction data, fully unlocking all the potential of Arena Learning.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/627b9f3f4d0858f0034efbb9/rcI89rGwrLkYa_tE7LTCm.png'}]",[],"[{'reaction': '🚀', 'users': ['WizardLM', 'YaTharThShaRma999', 'John6666', 'AndrewZeng', 'ajibawa-2023', 'osanseviero', 'mmhamdy', 'Best-codes', 'KingNish', 'AbheekG', 'sohampnow', 'reach-vb', 'Alexandro14', 'KakaRotting', 'aarmn', 'mohd43'], 'count': 16}, {'reaction': '👍', 'users': ['AndrewZeng', 'mmhamdy', 'AbheekG', 'TitleOS', 'reach-vb', 'davickyz', 'Josiah654'], 'count': 7}, {'reaction': '❤️', 'users': ['AbheekG', 'reach-vb', 'liyongsea', '1ucky1uke', 'davickyz', 'Josiah654', 'digvijay25'], 'count': 7}]",2024-07-15 15:42:45,2024-10-28 10:09:16.328,"[{'_id': '64d323b950310d7ad397fce7', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64d323b950310d7ad397fce7/KKGL1Y5ZeQSBTqaeKQCaY.png', 'fullname': 'Ryan Miller', 'name': 'Meroar', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/WizardLM/574698793995338,16024,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1658166666371-noauth.png,44.0,Stepanov,Ihor,615276720615026,"[{'type': 'text', 'value': '🚀 Meet Our New Line of Efficient and Accurate Zero-Shot Classifiers! 🚀', 'raw': '🚀 Meet Our New Line of Efficient and Accurate Zero-Shot Classifiers! 
🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The new architecture brings better inter-label understanding and can solve complex classification tasks in a single forward pass.', 'raw': 'The new architecture brings better inter-label understanding and can solve complex classification tasks in a single forward pass.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Applications:', 'raw': 'Key Applications:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Multi-class classification (up to 100 classes in a single run)', 'raw': '✅ Multi-class classification (up to 100 classes in a single run)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Topic classification', 'raw': '✅ Topic classification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Sentiment analysis', 'raw': '✅ Sentiment analysis'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Event classification', 'raw': '✅ Event classification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Prompt-based constrained classification', 'raw': '✅ Prompt-based constrained classification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Natural Language Inference', 'raw': '✅ Natural Language Inference'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Multi- and single-label classification', 'raw': '✅ Multi- and single-label classification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'knowledgator/gliclass-6661838823756265f2ac3848'}, 'url': 'https://huggingface.co/collections/knowledgator/gliclass-6661838823756265f2ac3848', 'raw': 'https://huggingface.co/collections/knowledgator/gliclass-6661838823756265f2ac3848'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'knowledgator/GLiClass_SandBox'}, 'url': 'https://huggingface.co/spaces/knowledgator/GLiClass_SandBox', 'raw': 'https://huggingface.co/spaces/knowledgator/GLiClass_SandBox'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'knowledgator/gliclass-base-v1.0-lw'}, 'url': 'https://huggingface.co/knowledgator/gliclass-base-v1.0-lw', 'raw': 'https://huggingface.co/knowledgator/gliclass-base-v1.0-lw'}, {'type': 'new_line', 'raw': '\n'}]","🚀 Meet Our New Line of Efficient and Accurate Zero-Shot Classifiers! 🚀 + +The new architecture brings better inter-label understanding and can solve complex classification tasks in a single forward pass. 
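Here is a quick usage sketch before the feature list. The `gliclass` package import, pipeline arguments, and result schema are best-guess assumptions rather than a canonical snippet, so verify against the model card:

```python
# pip install gliclass  (assumed package name; confirm on the model card)
from gliclass import GLiClassModel, ZeroShotClassificationPipeline
from transformers import AutoTokenizer

model = GLiClassModel.from_pretrained('knowledgator/gliclass-base-v1.0-lw')
tokenizer = AutoTokenizer.from_pretrained('knowledgator/gliclass-base-v1.0-lw')

pipeline = ZeroShotClassificationPipeline(
    model, tokenizer, classification_type='multi-label', device='cpu'
)

text = 'The central bank raised interest rates to curb inflation.'
labels = ['finance', 'sports', 'politics', 'technology']

# All candidate labels are scored together in one forward pass.
for pred in pipeline(text, labels, threshold=0.5)[0]:
    print(pred['label'], round(pred['score'], 3))  # assumed result schema
```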
+ +Key Applications: +✅ Multi-class classification (up to 100 classes in a single run) +✅ Topic classification +✅ Sentiment analysis +✅ Event classification +✅ Prompt-based constrained classification +✅ Natural Language Inference +✅ Multi- and single-label classification + +https://huggingface.co/collections/knowledgator/gliclass-6661838823756265f2ac3848 +https://huggingface.co/spaces/knowledgator/GLiClass_SandBox +https://huggingface.co/knowledgator/gliclass-base-v1.0-lw +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62d59dd5a2de3ae5ea6fc262/gzUpKEfJITAYAVq7T7NQ8.png'}]",[],"[{'reaction': '🔥', 'users': ['reach-vb', 'Alexandro14', 'Ihor'], 'count': 3}]",2024-07-15 09:29:57,2024-07-15 09:30:42.028,[],/posts/Ihor/615276720615026,903,,0 +/avatars/816ec718442fa541a21a02b29070e08a.svg,,Joo-Haeng Lee,joohaeng,408808506555116,"[{'type': 'mention', 'user': 'seyonec', 'raw': '@seyonec'}, {'type': 'text', 'value': "" It's a great help to experiment with ChemBERTa. "", 'raw': "" It's a great help to experiment with ChemBERTa. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'BTW, there are several models that handle SMILES in the model repository. Can you kindly recommend the one with the best performance in handling the hERG dataset?', 'raw': 'BTW, there are several models that handle SMILES in the model repository. Can you kindly recommend the one with the best performance in handling the hERG dataset?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://paperswithcode.com/dataset/herg', 'raw': 'https://paperswithcode.com/dataset/herg'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Best,', 'raw': 'Best,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Joo-Haeng Lee, Pebblous Inc.', 'raw': 'Joo-Haeng Lee, Pebblous Inc.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'http://pebblous.ai', 'raw': 'http://pebblous.ai'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","@seyonec It's a great help to experiment with ChemBERTa. + +BTW, there are several models that handle SMILES in the model repository. Can you kindly recommend the one with the best performance in handling the hERG dataset? +https://paperswithcode.com/dataset/herg + +Best, +Joo-Haeng Lee, Pebblous Inc. +http://pebblous.ai ","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62d562635c29ac61fec7ae38/wtnDWsuOXfDFkRfVShCzI.mp4'}]","[{'_id': '5e8bf72583fb7a4fc16792ca', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1586230819365-noauth.png', 'fullname': 'Seyone Chithrananda', 'name': 'seyonec', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 59}]","[{'reaction': '🔥', 'users': ['AIDANIAI', 'reach-vb', 'osanseviero'], 'count': 3}]",2024-07-15 06:40:50,2024-07-15 06:40:50.723,[],/posts/joohaeng/408808506555116,836,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/-mckPlAIvU8v7-MIA3lrc.jpeg,1.0,Georgeos Díaz-Montexano,GeorgeosDiazMontexano,540200274459008,"[{'type': 'text', 'value': 'Please, help with this!!!! ""You have exceeded your GPU quota... "" I have paid the Pro service of 9 Euros per month, and everything remains the same, I barely do four tasks and I get the restriction message that I have to wait many minutes or more than an hour. 
So why have I paid the 9 Euros a month, if everything remains the same, with the same restrictions as if it were a free account? I have written, but they do not respond to me. Please help me.', 'raw': 'Please, help with this!!!! ""You have exceeded your GPU quota... "" I have paid the Pro service of 9 Euros per month, and everything remains the same, I barely do four tasks and I get the restriction message that I have to wait many minutes or more than an hour. So why have I paid the 9 Euros a month, if everything remains the same, with the same restrictions as if it were a free account? I have written, but they do not respond to me. Please help me.'}]","Please, help with this!!!! ""You have exceeded your GPU quota... "" I have paid the Pro service of 9 Euros per month, and everything remains the same, I barely do four tasks and I get the restriction message that I have to wait many minutes or more than an hour. So why have I paid the 9 Euros a month, if everything remains the same, with the same restrictions as if it were a free account? I have written, but they do not respond to me. Please help me.",[],[],[],2024-07-08 22:15:21,2024-07-13 15:05:57.147,"[{'_id': '65d883893a52cd9bcd8ab7cf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg', 'fullname': 'leroy Samuel Dyer', 'name': 'LeroyDyer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': False}, {'_id': '65bb837dbfb878f46c77de4c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg', 'fullname': 'Prithiv Sakthi', 'name': 'prithivMLmods', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1957, 'isFollowing': False}, {'_id': '6658315bfb28d37fe56c6a2f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/-mckPlAIvU8v7-MIA3lrc.jpeg', 'fullname': 'Georgeos Díaz-Montexano', 'name': 'GeorgeosDiazMontexano', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/GeorgeosDiazMontexano/540200274459008,602,,15 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,248827863204916,"[{'type': 'text', 'value': 'Very cool dataset for journalists and historians just dropped: 2.7 million unique public domain U.S. news wire articles (1878-1977) 📰🕰️', 'raw': 'Very cool dataset for journalists and historians just dropped: 2.7 million unique public domain U.S. news wire articles (1878-1977) 📰🕰️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is a goldmine for tracking historical events & newspaper coverage trends! An example? If we still wonder whether gender diversity in the media is important... 
""Only 4.6% of disambiguated entity mentions refer to women, and the most mentioned woman is Golda Meir.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Bonus:', 'raw': 'Bonus:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Locations in these articles are georeferenced', 'raw': '- Locations in these articles are georeferenced'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Topics are tagged using customized neural topic classification', 'raw': '- Topics are tagged using customized neural topic classification'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Named entities are recognized,', 'raw': '- Named entities are recognized,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Individuals are disambiguated to Wikipedia using a novel entity disambiguation model ', 'raw': '- Individuals are disambiguated to Wikipedia using a novel entity disambiguation model '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Anyone thinking of cool AI projects with this data? Maybe tracking the spread of news stories over time & space?', 'raw': 'Anyone thinking of cool AI projects with this data? Maybe tracking the spread of news stories over time & space?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐆𝐨 𝐝𝐞𝐞𝐩𝐞𝐫', 'raw': '𝐆𝐨 𝐝𝐞𝐞𝐩𝐞𝐫'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Digg into the dataset: ', 'raw': '👉 Digg into the dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'dell-research-harvard/newswire'}, 'url': 'https://huggingface.co/datasets/dell-research-harvard/newswire', 'raw': 'https://huggingface.co/datasets/dell-research-harvard/newswire'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' 👉 Read the paper: ', 'raw': ' 👉 Read the paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2406.09490'}, 'url': 'https://huggingface.co/papers/2406.09490', 'raw': 'https://huggingface.co/papers/2406.09490', 'label': 'Newswire: A Large-Scale Structured Database of a Century of Historical\n News (2406.09490)'}]","Very cool dataset for journalists and historians just dropped: 2.7 million unique public domain U.S. news wire articles (1878-1977) 📰🕰️ + +This is a goldmine for tracking historical events & newspaper coverage trends! An example? If we still wonder whether gender diversity in the media is important... ""Only 4.6% of disambiguated entity mentions refer to women, and the most mentioned woman is Golda Meir."" + +Bonus: +- Locations in these articles are georeferenced +- Topics are tagged using customized neural topic classification +- Named entities are recognized, +- Individuals are disambiguated to Wikipedia using a novel entity disambiguation model + +Anyone thinking of cool AI projects with this data? Maybe tracking the spread of news stories over time & space? 
+ +𝐆𝐨 𝐝𝐞𝐞𝐩𝐞𝐫 +👉 Digg into the dataset: https://huggingface.co/datasets/dell-research-harvard/newswire + 👉 Read the paper: https://huggingface.co/papers/2406.09490","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/zH29jywrTnHPTmhPVJtzI.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/IxY_1CUK65ZnQpiPlbV3k.png'}]",[],"[{'reaction': '🤝', 'users': ['ZeroWw', 'prithivMLmods', 'Dihelson'], 'count': 3}, {'reaction': '🚀', 'users': ['louisbrulenaudet', 'Dihelson'], 'count': 2}]",2024-07-08 19:46:36,2024-07-08 19:46:36.578,[],/posts/fdaudens/248827863204916,2051,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,583121742800907,"[{'type': 'text', 'value': 'Running billion parameter models, sometimes we forget what it all is! 🤔💡', 'raw': 'Running billion parameter models, sometimes we forget what it all is! 🤔💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Matrix multiplication 🧮✨', 'raw': 'Matrix multiplication 🧮✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'While there are multiple plays on memory management and caching to speed it up! 🏎️💾⚡', 'raw': 'While there are multiple plays on memory management and caching to speed it up! 🏎️💾⚡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The naive way of Matrix multiplication becomes even more fascinating the bigger these models get! 🤯📈', 'raw': 'The naive way of Matrix multiplication becomes even more fascinating the bigger these models get! 🤯📈'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'QKV for the win! 🏆🔑📚', 'raw': 'QKV for the win! 🏆🔑📚'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub: ', 'raw': 'GitHub: '}, {'type': 'link', 'href': 'https://github.com/wentasah/mmul-anim', 'raw': 'https://github.com/wentasah/mmul-anim'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Slides: ', 'raw': 'Slides: '}, {'type': 'link', 'href': 'https://cw.fel.cvut.cz/wiki/_media/courses/b4m36esw/esw09_2019.pdf', 'raw': 'https://cw.fel.cvut.cz/wiki/_media/courses/b4m36esw/esw09_2019.pdf'}, {'type': 'text', 'value': ' 📑🎓', 'raw': ' 📑🎓'}]","Running billion parameter models, sometimes we forget what it all is! 🤔💡 + +Matrix multiplication 🧮✨ + +While there are multiple plays on memory management and caching to speed it up! 🏎️💾⚡ + +The naive way of Matrix multiplication becomes even more fascinating the bigger these models get! 🤯📈 + +QKV for the win! 
🏆🔑📚 + +GitHub: https://github.com/wentasah/mmul-anim +Slides: https://cw.fel.cvut.cz/wiki/_media/courses/b4m36esw/esw09_2019.pdf 📑🎓","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/OjbpZtcg6IXpLlC_AIzGL.mp4'}]",[],"[{'reaction': '😎', 'users': ['GPT007'], 'count': 1}]",2024-07-08 18:32:40,2024-07-08 18:57:40.799,"[{'_id': '662f7599987978b5a2a75b04', 'avatarUrl': '/avatars/9005535061d658c53fbb7167b2a9b51f.svg', 'fullname': 'Tennyson', 'name': 'Mandark424', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/singhsidhukuldeep/583121742800907,589,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6459fa0f5b3111fbe83286e1/E6Buqu8Wd9WmIHKOCZXCc.jpeg,235.0,Louis Brulé Naudet,louisbrulenaudet,520896157294536,"[{'type': 'text', 'value': ""Introducing the first two projects on the HFforLegal community: the 'Laws' dataset and the associated search tool based on "", 'raw': ""Introducing the first two projects on the HFforLegal community: the 'Laws' dataset and the associated search tool based on ""}, {'type': 'mention', 'user': 'nreimers', 'raw': '@nreimers'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'tomaarsen', 'raw': '@tomaarsen'}, {'type': 'text', 'value': ""'s Sentence Transformers library 🤗"", 'raw': ""'s Sentence Transformers library 🤗""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The objective of these two tools is to centralize in a single format a set of rules from different countries and legal systems in order to facilitate NLP in the field of comparative law, enabling more accurate and comprehensive legal analysis across different jurisdictions 🌍', 'raw': 'The objective of these two tools is to centralize in a single format a set of rules from different countries and legal systems in order to facilitate NLP in the field of comparative law, enabling more accurate and comprehensive legal analysis across different jurisdictions 🌍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the dataset : ', 'raw': 'Link to the dataset : '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HFforLegal/laws'}, 'url': 'https://huggingface.co/datasets/HFforLegal/laws', 'raw': 'https://huggingface.co/datasets/HFforLegal/laws'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the space: ', 'raw': 'Link to the space: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'HFforLegal/laws-retrieval'}, 'url': 'https://huggingface.co/spaces/HFforLegal/laws-retrieval', 'raw': 'https://huggingface.co/spaces/HFforLegal/laws-retrieval'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We need your contributions to enrich this new knowledge base, and you will find in the 'Laws' dataset all the information you need to format your data and submit them to the appropriate split."", 'raw': ""We need your contributions to enrich this new knowledge base, and you will find in the 'Laws' dataset all the information you need to format your data and submit them to the appropriate split.""}]","Introducing the first two projects on the HFforLegal community: the 'Laws' dataset and the associated search tool based on @nreimers and @tomaarsen's Sentence Transformers library 🤗 + +The objective of these 
two tools is to centralize in a single format a set of rules from different countries and legal systems in order to facilitate NLP in the field of comparative law, enabling more accurate and comprehensive legal analysis across different jurisdictions 🌍 + +Link to the dataset : https://huggingface.co/datasets/HFforLegal/laws +Link to the space: https://huggingface.co/spaces/HFforLegal/laws-retrieval + +We need your contributions to enrich this new knowledge base, and you will find in the 'Laws' dataset all the information you need to format your data and submit them to the appropriate split.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6459fa0f5b3111fbe83286e1/GgqOYvDlNh73dF6Zd-cM5.jpeg'}]","[{'_id': '5eff4688ff69163f6f59e66c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1596792577829-5eff4688ff69163f6f59e66c.jpeg', 'fullname': 'Nils Reimers', 'name': 'nreimers', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 92}, {'_id': '6317233cc92fd6fee317e030', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png', 'fullname': 'Tom Aarsen', 'name': 'tomaarsen', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2266}]","[{'reaction': '❤️', 'users': ['tomaarsen', 'GPT007', 'AtAndDev', 'Nymbo'], 'count': 4}]",2024-07-08 18:13:07,2024-07-08 18:13:07.544,[],/posts/louisbrulenaudet/520896157294536,2129,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,890020620506496,"[{'type': 'text', 'value': 'New cookbook!', 'raw': 'New cookbook!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I show how to make agentic RAG using Transformers Agents.', 'raw': 'I show how to make agentic RAG using Transformers Agents.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Compared to vanilla RAG, agentic RAG can:', 'raw': 'Compared to vanilla RAG, agentic RAG can:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Reformulate the query', 'raw': '✅ Reformulate the query'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Critique the retrieved content to re-retrieve if needed', 'raw': '✅ Critique the retrieved content to re-retrieve if needed'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ Score increase of 8.5%! 💪 (Llama-3-70B-judge)', 'raw': '➡️ Score increase of 8.5%! 💪 (Llama-3-70B-judge)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read it here 👉 ', 'raw': 'Read it here 👉 '}, {'type': 'link', 'href': 'https://huggingface.co/learn/cookbook/agent_rag', 'raw': 'https://huggingface.co/learn/cookbook/agent_rag'}]","New cookbook! + +I show how to make agentic RAG using Transformers Agents. + +Compared to vanilla RAG, agentic RAG can: +✅ Reformulate the query +✅ Critique the retrieved content to re-retrieve if needed + +➡️ Score increase of 8.5%! 
💪 (Llama-3-70B-judge) + +Read it here 👉 https://huggingface.co/learn/cookbook/agent_rag",[],[],"[{'reaction': '🔥', 'users': ['merve', 'sergiopaniego', 'GPT007', 'AtAndDev'], 'count': 4}, {'reaction': '❤️', 'users': ['merve', 'GPT007', 'osanseviero', 'AtAndDev'], 'count': 4}, {'reaction': '👍', 'users': ['merve', 'GPT007', 'AtAndDev'], 'count': 3}]",2024-07-08 15:16:16,2024-07-08 15:16:40.147,[],/posts/m-ric/890020620506496,1876,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,241823096811696,"[{'type': 'text', 'value': 'Animate a portrait with a driving video. Lots of potential fun here 😅 ', 'raw': 'Animate a portrait with a driving video. Lots of potential fun here 😅 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'KwaiVGI/LivePortrait'}, 'url': 'https://huggingface.co/spaces/KwaiVGI/LivePortrait', 'raw': 'https://huggingface.co/spaces/KwaiVGI/LivePortrait'}]",Animate a portrait with a driving video. Lots of potential fun here 😅 https://huggingface.co/spaces/KwaiVGI/LivePortrait,"[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/hGweHtuONHQLxkkZyniLB.mp4'}]",[],"[{'reaction': '🔥', 'users': ['prithivMLmods', 'Ramikan-BR', 'GPT007', 'victor', 'osanseviero', 'Yersel'], 'count': 6}, {'reaction': '🚀', 'users': ['Ramikan-BR', 'GPT007', 'osanseviero'], 'count': 3}, {'reaction': '👀', 'users': ['Ramikan-BR'], 'count': 1}, {'reaction': '❤️', 'users': ['ali0une'], 'count': 1}]",2024-07-08 14:25:37,2024-07-08 14:25:37.077,[],/posts/fdaudens/241823096811696,2192,,0 +/avatars/b9a6d8e11ec7a62ca2b819e0b6c37222.svg,2349.0,gokay aydogan,gokaygokay,330158692626177,"[{'type': 'text', 'value': 'Kolors with VLM support', 'raw': 'Kolors with VLM support'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've built a space for using Kolors image generation model with captioner models and prompt enhancers."", 'raw': ""I've built a space for using Kolors image generation model with captioner models and prompt enhancers.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Space with VLM and Prompt Enhancer', 'raw': '- Space with VLM and Prompt Enhancer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'gokaygokay/KolorsPlusPlus'}, 'url': 'https://huggingface.co/spaces/gokaygokay/KolorsPlusPlus', 'raw': 'https://huggingface.co/spaces/gokaygokay/KolorsPlusPlus'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Original Space for model', 'raw': '- Original Space for model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'gokaygokay/Kolors'}, 'url': 'https://huggingface.co/spaces/gokaygokay/Kolors', 'raw': 'https://huggingface.co/spaces/gokaygokay/Kolors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Captioner VLMs', 'raw': '- Captioner VLMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'gokaygokay/sd3-long-captioner-v2'}, 'url': 
'https://huggingface.co/gokaygokay/sd3-long-captioner-v2', 'raw': 'https://huggingface.co/gokaygokay/sd3-long-captioner-v2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Florence-2-base'}, 'url': 'https://huggingface.co/microsoft/Florence-2-base', 'raw': 'https://huggingface.co/microsoft/Florence-2-base'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Prompt Enhancers', 'raw': '- Prompt Enhancers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'gokaygokay/Lamini-Prompt-Enchance-Long'}, 'url': 'https://huggingface.co/gokaygokay/Lamini-Prompt-Enchance-Long', 'raw': 'https://huggingface.co/gokaygokay/Lamini-Prompt-Enchance-Long'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'gokaygokay/Lamini-Prompt-Enchance'}, 'url': 'https://huggingface.co/gokaygokay/Lamini-Prompt-Enchance', 'raw': 'https://huggingface.co/gokaygokay/Lamini-Prompt-Enchance'}, {'type': 'new_line', 'raw': '\n'}]","Kolors with VLM support + +I've built a space for using Kolors image generation model with captioner models and prompt enhancers. + +- Space with VLM and Prompt Enhancer + https://huggingface.co/spaces/gokaygokay/KolorsPlusPlus + +- Original Space for model +https://huggingface.co/spaces/gokaygokay/Kolors + +- Captioner VLMs +- https://huggingface.co/gokaygokay/sd3-long-captioner-v2 + +- https://huggingface.co/microsoft/Florence-2-base + +- Prompt Enhancers +- https://huggingface.co/gokaygokay/Lamini-Prompt-Enchance-Long + +- https://huggingface.co/gokaygokay/Lamini-Prompt-Enchance +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/630899601dd1e3075d975785/a8V0hLE0jgUNsdXH7Cq6I.png'}]",[],"[{'reaction': '🔥', 'users': ['John6666', 'ucsahin', 'kramp', 'osanseviero', 'Ramikan-BR', 'yoeldcd', 'tenet', 'Joseph717171', 'AtAndDev', 'Wok'], 'count': 10}, {'reaction': '🤯', 'users': ['Wok'], 'count': 1}, {'reaction': '👍', 'users': ['Wok'], 'count': 1}]",2024-07-08 09:53:00,2024-07-08 09:55:26.573,[],/posts/gokaygokay/330158692626177,6206,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/628debe0ce274a882affe104/KY1QYa603yff-Sm6wUUEq.png,25.0,Zhimin Zhao,zhiminy,333361610370068,"[{'type': 'text', 'value': 'Hey everyone!', 'raw': 'Hey everyone!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Our team just dropped something cool! 🎉 We've published a new paper on arxiv diving into the foundation model leaderboards across different platforms. We've analyzed the content, operational workflows, and common issues of these leaderboards. From this, we came up with two new concepts: Leaderboard Operations (LBOps) and leaderboard smells."", 'raw': ""Our team just dropped something cool! 🎉 We've published a new paper on arxiv diving into the foundation model leaderboards across different platforms. We've analyzed the content, operational workflows, and common issues of these leaderboards. 
From this, we came up with two new concepts: Leaderboard Operations (LBOps) and leaderboard smells.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We also put together an awesome list with nearly 300 of the latest leaderboards, development tools, and publishing organizations. You can check it out here: ', 'raw': 'We also put together an awesome list with nearly 300 of the latest leaderboards, development tools, and publishing organizations. You can check it out here: '}, {'type': 'link', 'href': 'https://github.com/SAILResearch/awesome-foundation-model-leaderboards', 'raw': 'https://github.com/SAILResearch/awesome-foundation-model-leaderboards'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you find it useful or interesting, give us a follow or drop a comment. We'd love to hear your thoughts and get your support! ✨"", 'raw': ""If you find it useful or interesting, give us a follow or drop a comment. We'd love to hear your thoughts and get your support! ✨""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the paper: ', 'raw': 'Link to the paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2407.04065', 'raw': 'https://arxiv.org/abs/2407.04065'}]","Hey everyone! + +Our team just dropped something cool! 🎉 We've published a new paper on arxiv diving into the foundation model leaderboards across different platforms. We've analyzed the content, operational workflows, and common issues of these leaderboards. From this, we came up with two new concepts: Leaderboard Operations (LBOps) and leaderboard smells. + +We also put together an awesome list with nearly 300 of the latest leaderboards, development tools, and publishing organizations. You can check it out here: https://github.com/SAILResearch/awesome-foundation-model-leaderboards + +If you find it useful or interesting, give us a follow or drop a comment. We'd love to hear your thoughts and get your support! ✨ + +Link to the paper: https://arxiv.org/abs/2407.04065",[],[],"[{'reaction': '👀', 'users': ['zhiminy', 'osanseviero', 'louisbrulenaudet', 'Tonic'], 'count': 4}]",2024-07-08 03:21:23,2024-07-09 04:41:16.291,[],/posts/zhiminy/333361610370068,2004,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f278507e923d665e616271b/tWFuswXOTXtvMdL8zSrr_.png,18.0,Rishabh Bhardwaj,RishabhBhardwaj,993060645926299,"[{'type': 'text', 'value': 'Excited to announce the release of the community version of our guardrails: WalledGuard-C!', 'raw': 'Excited to announce the release of the community version of our guardrails: WalledGuard-C!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Feel free to use it—compared to Meta’s guardrails, it offers superior performance, being 4x faster. Most importantly, it's free for nearly any use!"", 'raw': ""Feel free to use it—compared to Meta’s guardrails, it offers superior performance, being 4x faster. 
Most importantly, it's free for nearly any use!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link: ', 'raw': 'Link: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'walledai/walledguard-c'}, 'url': 'https://huggingface.co/walledai/walledguard-c', 'raw': 'https://huggingface.co/walledai/walledguard-c'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#AISafety', 'raw': '#AISafety'}]","Excited to announce the release of the community version of our guardrails: WalledGuard-C!
+
+Feel free to use it—compared to Meta’s guardrails, it offers superior performance, being 4x faster. Most importantly, it's free for nearly any use!
+
+Link: https://huggingface.co/walledai/walledguard-c
+
+#AISafety",[],[],"[{'reaction': '👍', 'users': ['Mehyaar', 'osanseviero', 'RishabhBhardwaj', 'prithivMLmods'], 'count': 4}]",2024-07-07 19:42:31,2024-07-08 14:16:17.736,"[{'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}]",/posts/RishabhBhardwaj/993060645926299,2147,,1
+https://cdn-avatars.huggingface.co/v1/production/uploads/63f4fcd871a5d395c71dc34e/ej2xshmjs3RvSNU9dHPz7.jpeg,6.0,Maks,Kkordik,500754105377818,"[{'type': 'text', 'value': 'Anyone know how to use SD3 for an inpaint task? :)', 'raw': 'Anyone know how to use SD3 for an inpaint task? :)'}, {'type': 'new_line', 'raw': '\n'}]","Anyone know how to use SD3 for an inpaint task? :)
+",[],[],[],2024-07-07 17:37:00,2024-07-07 17:37:00.826,[],/posts/Kkordik/500754105377818,831,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/642f4c789b2484d7d8551a93/0lH4YXcbZa-Xlzj6ESo7F.jpeg,10.0,Yihe Deng,ydeng9,957785370728366,"[{'type': 'text', 'value': 'Check out our new benchmark paper on LLM agents for global events forecasting! ', 'raw': 'Check out our new benchmark paper on LLM agents for global events forecasting! 
'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2407.01231'}, 'url': 'https://huggingface.co/papers/2407.01231', 'raw': 'https://huggingface.co/papers/2407.01231', 'label': 'MIRAI: Evaluating LLM Agents for Event Forecasting (2407.01231)'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📜 Arxiv: ', 'raw': '📜 Arxiv: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2407.01231', 'raw': 'https://arxiv.org/abs/2407.01231'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Project page: ', 'raw': '🔗 Project page: '}, {'type': 'link', 'href': 'https://mirai-llm.github.io', 'raw': 'https://mirai-llm.github.io'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻 GitHub Repo: ', 'raw': '💻 GitHub Repo: '}, {'type': 'link', 'href': 'https://github.com/yecchen/MIRAI', 'raw': 'https://github.com/yecchen/MIRAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Dataset: ', 'raw': '📁 Dataset: '}, {'type': 'link', 'href': 'https://drive.google.com/file/d/1xmSEHZ_wqtBu1AwLpJ8wCDYmT-jRpfrN/view?usp=sharing', 'raw': 'https://drive.google.com/file/d/1xmSEHZ_wqtBu1AwLpJ8wCDYmT-jRpfrN/view?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Interactive Demo Notebook: ', 'raw': '📊 Interactive Demo Notebook: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1QyqT35n6NbtPaNtqQ6A7ILG_GMeRgdnO?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1QyqT35n6NbtPaNtqQ6A7ILG_GMeRgdnO?usp=sharing'}]","Check out our new benchmark paper on LLM agents for global events forecasting! https://huggingface.co/papers/2407.01231
+
+📜 Arxiv: https://arxiv.org/abs/2407.01231
+🔗 Project page: https://mirai-llm.github.io
+💻 GitHub Repo: https://github.com/yecchen/MIRAI
+📁 Dataset: https://drive.google.com/file/d/1xmSEHZ_wqtBu1AwLpJ8wCDYmT-jRpfrN/view?usp=sharing
+📊 Interactive Demo Notebook: https://colab.research.google.com/drive/1QyqT35n6NbtPaNtqQ6A7ILG_GMeRgdnO?usp=sharing",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'ydeng9'], 'count': 2}]",2024-07-02 05:39:51,2024-07-02 05:44:20.105,[],/posts/ydeng9/957785370728366,1499,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/1630816930903-noauth.jpeg,3.0,Puffy Bird,puffy310,516704912173883,"[{'type': 'text', 'value': 'LLMs are improving at math faster than my math coursework. I appreciate all the hardworking engineers for helping me through high school.', 'raw': 'LLMs are improving at math faster than my math coursework. I appreciate all the hardworking engineers for helping me through high school.'}]",LLMs are improving at math faster than my math coursework. I appreciate all the hardworking engineers for helping me through high school.,[],[],"[{'reaction': '🤝', 'users': ['TuringsSolutions', 'puffy310', 'osanseviero'], 'count': 3}, {'reaction': '❤️', 'users': ['Xurinth'], 'count': 1}]",2024-07-02 04:45:54,2024-07-02 04:45:54.017,[],/posts/puffy310/516704912173883,1542,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/620630b603825909dcbeba35/vNlOtJqdcP3vpAfeHfNvP.jpeg,246.0,Aaron C Wacker,awacke1,469685030853998,"[{'type': 'text', 'value': ""✨🚀 Claude Sonnet 3.5 API. It's already weaving digital magic!"", 'raw': ""✨🚀 Claude Sonnet 3.5 API. 
It's already weaving digital magic!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠💻 Try it at my space: 🔗 ', 'raw': '🧠💻 Try it at my space: 🔗 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'awacke1/AnthropicClaude3.5Sonnet-ACW'}, 'url': 'https://huggingface.co/spaces/awacke1/AnthropicClaude3.5Sonnet-ACW', 'raw': 'https://huggingface.co/spaces/awacke1/AnthropicClaude3.5Sonnet-ACW'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kudos to ', 'raw': 'Kudos to '}, {'type': 'mention', 'user': 'AnthropicAI', 'raw': '@AnthropicAI'}, {'type': 'text', 'value': ' for this elegant API! 👏 #AI #CodeMagic #AnthropicAI Thanks Huggingface for hosting the best hub in the world for AI development!', 'raw': ' for this elegant API! 👏 #AI #CodeMagic #AnthropicAI Thanks Huggingface for hosting the best hub in the world for AI development!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","✨🚀 Claude Sonnet 3.5 API. It's already weaving digital magic! +🧠💻 Try it at my space: 🔗 https://huggingface.co/spaces/awacke1/AnthropicClaude3.5Sonnet-ACW + +Kudos to @AnthropicAI for this elegant API! 👏 #AI #CodeMagic #AnthropicAI Thanks Huggingface for hosting the best hub in the world for AI development! + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/620630b603825909dcbeba35/UOwC7dFKOdnwUu04G1N9B.png'}]",[],"[{'reaction': '❤️', 'users': ['clem', 'merterbak', 'John6666'], 'count': 3}]",2024-07-02 01:43:24,2025-01-10 08:04:30.443,"[{'_id': '66dfc98dfa10b3e5819eb483', 'avatarUrl': '/avatars/af113eff54405160a313d595c7e7bb0b.svg', 'fullname': 'Tushar Gautam', 'name': 'Tushar07777', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '66f962868d215c6331172ce3', 'avatarUrl': '/avatars/2b3abd4d9045b68fc213946950bdf4cd.svg', 'fullname': ""Rich'Art Dely "", 'name': 'Richartvrai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '64450e1fa56444c355d7713a', 'avatarUrl': '/avatars/4240498a017cd5fb53103a7c5968baad.svg', 'fullname': 'eray', 'name': 'hakaneray', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/awacke1/469685030853998,3151,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,260150398855068,"[{'type': 'text', 'value': '🚀 Transformers are not here to take part but take over... and down goes real-time object detection! 💥', 'raw': '🚀 Transformers are not here to take part but take over... and down goes real-time object detection! 💥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Enter Real-time DEtection Transformer (RT-DETR) 🦾 as suggested capable of real-time object detection. 🎯', 'raw': 'Enter Real-time DEtection Transformer (RT-DETR) 🦾 as suggested capable of real-time object detection. 
🎯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Object DEtection Transformer (DETR) is not new (', 'raw': 'Object DEtection Transformer (DETR) is not new ('}, {'type': 'mention', 'user': 'Meta', 'raw': '@Meta'}, {'type': 'text', 'value': ' did it eons ago) but it had the issue of every other transformer, high computational cost 💸', 'raw': ' did it eons ago) but it had the issue of every other transformer, high computational cost 💸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'RT-DETR brings an efficient hybrid encoder to expeditiously process multi-scale features by decoupling intra-scale interaction and cross-scale fusion to improve speed 🏎️', 'raw': 'RT-DETR brings an efficient hybrid encoder to expeditiously process multi-scale features by decoupling intra-scale interaction and cross-scale fusion to improve speed 🏎️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Gist is RT-DETR speeds up object detection by redesigning its encoder to process features more efficiently and selecting higher quality initial object queries. ⚡', 'raw': 'Gist is RT-DETR speeds up object detection by redesigning its encoder to process features more efficiently and selecting higher quality initial object queries. ⚡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It also allows adjusting the number of decoder layers to balance speed and accuracy for different real-time scenarios. ⚖️', 'raw': 'It also allows adjusting the number of decoder layers to balance speed and accuracy for different real-time scenarios. ⚖️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This makes RT-DETR faster and more accurate than previous YOLO models. 🏆', 'raw': 'This makes RT-DETR faster and more accurate than previous YOLO models. 🏆'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How much better😎/faster? ⏱️', 'raw': 'How much better😎/faster? ⏱️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'RT-DETR-R50 achieved 53.1% AP on COCO and 108 FPS on a T4 GPU, while RT-DETR-R101 achieved 54.3% AP and 74 FPS, outperforming advanced YOLO models in both speed and accuracy. 🚀✨', 'raw': 'RT-DETR-R50 achieved 53.1% AP on COCO and 108 FPS on a T4 GPU, while RT-DETR-R101 achieved 54.3% AP and 74 FPS, outperforming advanced YOLO models in both speed and accuracy. 🚀✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2304.08069'}, 'url': 'https://huggingface.co/papers/2304.08069', 'raw': 'https://huggingface.co/papers/2304.08069', 'label': 'DETRs Beat YOLOs on Real-time Object Detection (2304.08069)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧠 Models: ', 'raw': '🧠 Models: '}, {'type': 'link', 'href': 'https://huggingface.co/models?search=pekingu/rt-detr', 'raw': 'https://huggingface.co/models?search=pekingu/rt-detr'}]","🚀 Transformers are not here to take part but take over... and down goes real-time object detection! 💥 + +Enter Real-time DEtection Transformer (RT-DETR) 🦾 as suggested capable of real-time object detection. 
🎯
+
+Object DEtection Transformer (DETR) is not new (@Meta did it eons ago) but it had the issue of every other transformer, high computational cost 💸
+
+RT-DETR brings an efficient hybrid encoder to expeditiously process multi-scale features by decoupling intra-scale interaction and cross-scale fusion to improve speed 🏎️
+
+Gist is RT-DETR speeds up object detection by redesigning its encoder to process features more efficiently and selecting higher quality initial object queries. ⚡
+
+It also allows adjusting the number of decoder layers to balance speed and accuracy for different real-time scenarios. ⚖️
+
+This makes RT-DETR faster and more accurate than previous YOLO models. 🏆
+
+How much better😎/faster? ⏱️
+
+RT-DETR-R50 achieved 53.1% AP on COCO and 108 FPS on a T4 GPU, while RT-DETR-R101 achieved 54.3% AP and 74 FPS, outperforming advanced YOLO models in both speed and accuracy. 🚀✨
+
+📄 Paper: https://huggingface.co/papers/2304.08069
+
+🧠 Models: https://huggingface.co/models?search=pekingu/rt-detr","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/ZG7Cvefh62FnDpEcPNd8-.mp4'}]","[{'_id': '61e8c67cee1e1440121f0240', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61e8c67cee1e1440121f0240/7akLZSb6xJZzbnl0o9Cod.jpeg', 'fullname': 'Jonas', 'name': 'Meta', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9}]","[{'reaction': '🚀', 'users': ['merterbak', 'clem', 'osanseviero', 'louisbrulenaudet'], 'count': 4}, {'reaction': '👍', 'users': ['blender66cat', 'Artificial-superintelligence'], 'count': 2}, {'reaction': '🔥', 'users': ['zaanind'], 'count': 1}]",2024-07-01 22:32:05,2024-07-01 22:32:05.701,[],/posts/singhsidhukuldeep/260150398855068,1538,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/1628885133347-6116d0584ef9fdfbf45dc4d9.jpeg,284.0,Mohamed Rashad,MohamedRashad,175081092992561,"[{'type': 'text', 'value': 'Just updated ', 'raw': 'Just updated '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MohamedRashad/timm-leaderboard'}, 'url': 'https://huggingface.co/spaces/MohamedRashad/timm-leaderboard', 'raw': 'https://huggingface.co/spaces/MohamedRashad/timm-leaderboard'}, {'type': 'text', 'value': ' with fuzzy search for people who want to search for a certain vision model', 'raw': ' with fuzzy search for people who want to search for a certain vision model'}]",Just updated https://huggingface.co/spaces/MohamedRashad/timm-leaderboard with fuzzy search for people who want to search for a certain vision model,[],[],[],2024-07-01 16:34:56,2024-07-01 16:34:56.349,[],/posts/MohamedRashad/175081092992561,814,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,756732380261553,"[{'type': 'text', 'value': 'Search Hugging Face datasets by column names with a new experimental API! This API allows you to:', 'raw': 'Search Hugging Face datasets by column names with a new experimental API! 
This API allows you to:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Search for question-answering datasets that include context', 'raw': '- Search for question-answering datasets that include context'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Find alpaca-style datasets', 'raw': '- Find alpaca-style datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Locate DPO datasets', 'raw': '- Locate DPO datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out here: ', 'raw': 'Try it out here: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'librarian-bots/dataset-column-search-api'}, 'url': 'https://huggingface.co/spaces/librarian-bots/dataset-column-search-api', 'raw': 'https://huggingface.co/spaces/librarian-bots/dataset-column-search-api'}, {'type': 'text', 'value': ', or explore real-world applications in this notebook: ', 'raw': ', or explore real-world applications in this notebook: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'librarian-bots/dataset-column-search-api'}, 'url': 'https://huggingface.co/spaces/librarian-bots/dataset-column-search-api/blob/main/dataset_search_client_notebook.ipynb', 'raw': 'https://huggingface.co/spaces/librarian-bots/dataset-column-search-api/blob/main/dataset_search_client_notebook.ipynb'}]","Search Hugging Face datasets by column names with a new experimental API! This API allows you to: + +- Search for question-answering datasets that include context +- Find alpaca-style datasets +- Locate DPO datasets + +Try it out here: https://huggingface.co/spaces/librarian-bots/dataset-column-search-api, or explore real-world applications in this notebook: https://huggingface.co/spaces/librarian-bots/dataset-column-search-api/blob/main/dataset_search_client_notebook.ipynb",[],[],"[{'reaction': '❤️', 'users': ['clem', 'osanseviero', 'merterbak', 'John6666', 'louisbrulenaudet'], 'count': 5}, {'reaction': '👍', 'users': ['fffiloni'], 'count': 1}]",2024-07-01 16:33:50,2024-07-01 16:33:50.207,[],/posts/davanstrien/756732380261553,2208,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,701707924042367,"[{'type': 'text', 'value': 'Real-time DEtection Transformer (RT-DETR) landed in transformers 🤩 with Apache 2.0 license 😍', 'raw': 'Real-time DEtection Transformer (RT-DETR) landed in transformers 🤩 with Apache 2.0 license 😍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔖 models: ', 'raw': '🔖 models: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'PekingU'}, 'url': 'https://huggingface.co/PekingU', 'raw': 'https://huggingface.co/PekingU', 'image': 'https://www.gravatar.com/avatar/a2cf4e4900d33c5e795f772e8327843b?d=retro&size=100'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔖 demo: ', 'raw': '🔖 demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'merve/RT-DETR-tracking-coco'}, 'url': 'https://huggingface.co/spaces/merve/RT-DETR-tracking-coco', 'raw': 'https://huggingface.co/spaces/merve/RT-DETR-tracking-coco'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 paper: ', 'raw': '📝 paper: '}, {'type': 'resource', 'resource': 
{'type': 'paper', 'id': '2304.08069'}, 'url': 'https://huggingface.co/papers/2304.08069', 'raw': 'https://huggingface.co/papers/2304.08069', 'label': 'DETRs Beat YOLOs on Real-time Object Detection (2304.08069)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 notebook: ', 'raw': '📖 notebook: '}, {'type': 'link', 'href': 'https://github.com/merveenoyan/example_notebooks/blob/main/RT_DETR_Notebook.ipynb', 'raw': 'https://github.com/merveenoyan/example_notebooks/blob/main/RT_DETR_Notebook.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'YOLO models are known to be super fast for real-time computer vision, but they have a downside with being volatile to NMS 🥲', 'raw': 'YOLO models are known to be super fast for real-time computer vision, but they have a downside with being volatile to NMS 🥲'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Transformer-based models on the other hand are computationally not as efficient 🥲', 'raw': 'Transformer-based models on the other hand are computationally not as efficient 🥲'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Isn't there something in between? Enter RT-DETR!"", 'raw': ""Isn't there something in between? Enter RT-DETR!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The authors combined CNN backbone, multi-stage hybrid decoder (combining convs and attn) with a transformer decoder. In the paper, authors also claim one can adjust speed by changing decoder layers without retraining altogether. ', 'raw': 'The authors combined CNN backbone, multi-stage hybrid decoder (combining convs and attn) with a transformer decoder. In the paper, authors also claim one can adjust speed by changing decoder layers without retraining altogether. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The authors find out that the model performs better in terms of speed and accuracy compared to the previous state-of-the-art. 🤩', 'raw': 'The authors find out that the model performs better in terms of speed and accuracy compared to the previous state-of-the-art. 🤩'}, {'type': 'new_line', 'raw': '\n'}]","Real-time DEtection Transformer (RT-DETR) landed in transformers 🤩 with Apache 2.0 license 😍 + +🔖 models: https://huggingface.co/PekingU +🔖 demo: https://huggingface.co/spaces/merve/RT-DETR-tracking-coco +📝 paper: https://huggingface.co/papers/2304.08069 +📖 notebook: https://github.com/merveenoyan/example_notebooks/blob/main/RT_DETR_Notebook.ipynb + +YOLO models are known to be super fast for real-time computer vision, but they have a downside with being volatile to NMS 🥲 + +Transformer-based models on the other hand are computationally not as efficient 🥲 + +Isn't there something in between? Enter RT-DETR! + +The authors combined CNN backbone, multi-stage hybrid decoder (combining convs and attn) with a transformer decoder. In the paper, authors also claim one can adjust speed by changing decoder layers without retraining altogether. +The authors find out that the model performs better in terms of speed and accuracy compared to the previous state-of-the-art. 
🤩 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/YxCWX0s_aPy5exTVxOMTB.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['merterbak', 'hllj', 'clem', 'osanseviero', 'multimodalart', 'not-lain', 'netynet', 'sudzdpn', 'louisbrulenaudet', 'mathieu-chauvet', 'Kabil007', 'Rbrq'], 'count': 12}, {'reaction': '👍', 'users': ['talkative', 'Tom-Neverwinter', 'ooaykac', 'GPT007'], 'count': 4}]",2024-07-01 15:20:56,2024-07-01 15:20:56.270,[],/posts/merve/701707924042367,5170,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/635dd6cd4fabde0df74aeae6/23c0uEOr7RWDtSLDBzkPD.png,573.0,araminta_k,alvdansen,736464349744598,"[{'type': 'text', 'value': '**How I train a LoRA: m3lt style training overview**', 'raw': '**How I train a LoRA: m3lt style training overview**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've just written an article that takes a step by step approach to outlining the method that I used to train the 'm3lt' lora, a blended style model. "", 'raw': ""I've just written an article that takes a step by step approach to outlining the method that I used to train the 'm3lt' lora, a blended style model. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've used the LoRA Ease trainer by "", 'raw': ""I've used the LoRA Ease trainer by ""}, {'type': 'mention', 'user': 'multimodalart', 'raw': '@multimodalart'}, {'type': 'text', 'value': ' :D', 'raw': ' :D'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/alvdansen/training-lora-m3lt', 'raw': 'https://huggingface.co/blog/alvdansen/training-lora-m3lt'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'multimodalart/lora-ease'}, 'url': 'https://huggingface.co/spaces/multimodalart/lora-ease', 'raw': 'https://huggingface.co/spaces/multimodalart/lora-ease'}]","**How I train a LoRA: m3lt style training overview** + +I've just written an article that takes a step by step approach to outlining the method that I used to train the 'm3lt' lora, a blended style model. 
+ +I've used the LoRA Ease trainer by @multimodalart :D + +https://huggingface.co/blog/alvdansen/training-lora-m3lt +https://huggingface.co/spaces/multimodalart/lora-ease","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/635dd6cd4fabde0df74aeae6/1U0NMk0JzEOHJ-SSGQLIH.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/635dd6cd4fabde0df74aeae6/4w6SowJY_q1nEy_43i14u.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/635dd6cd4fabde0df74aeae6/RZgoHZpNkfo7k_4W0Rz1U.png'}]","[{'_id': '624bebf604abc7ebb01789af', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1649143001781-624bebf604abc7ebb01789af.jpeg', 'fullname': 'Apolinário from multimodal AI art', 'name': 'multimodalart', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4512}]","[{'reaction': '❤️', 'users': ['ijohn07', 'clem', 'blender66cat', 'not-lain', 'thedarktrumpet', 'ashemvets', 'glpx', 'IkeaMan', 'stinkyyy'], 'count': 9}, {'reaction': '🚀', 'users': ['victor', 'multimodalart', 'rishiguin', 'clem', 'not-lain', 'louisbrulenaudet'], 'count': 6}, {'reaction': '🔥', 'users': ['victor', 'multimodalart', 'clem', 'not-lain'], 'count': 4}]",2024-07-01 14:13:37,2024-09-18 17:21:32.617,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '635dd6cd4fabde0df74aeae6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/635dd6cd4fabde0df74aeae6/23c0uEOr7RWDtSLDBzkPD.png', 'fullname': 'araminta_k', 'name': 'alvdansen', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 573, 'isFollowing': False}, {'_id': '6639d768d48a3da6e4c9fbe1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6639d768d48a3da6e4c9fbe1/4u6zXlZkiu8YQbnZuP4hr.jpeg', 'fullname': 'Kim', 'name': 'KKKKKIIIIIIIMMMMM', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '6508c5c5b44445e9b36ed94e', 'avatarUrl': '/avatars/181782227fdd2ae04a504af7c79a19bc.svg', 'fullname': 'Maiya', 'name': 'DualChimerra', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/alvdansen/736464349744598,3175,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,215969412331682,"[{'type': 'text', 'value': '5,000 new repos (models, datasets, spaces) are created EVERY DAY on HF now. The community is amazing!', 'raw': '5,000 new repos (models, datasets, spaces) are created EVERY DAY on HF now. The community is amazing!'}]","5,000 new repos (models, datasets, spaces) are created EVERY DAY on HF now. 
The community is amazing!",[],[],"[{'reaction': '❤️', 'users': ['umuthopeyildirim', 'osanseviero', 'victor', 'tail-call', 'paulml', 'jeremy-london', 'brunatrevelin', 'Clausss', 'ZeroWw', 'xi0v', 'yuriachermann', 'merterbak', 'nroggendorff', 'sted97', 'Jaromir', 'not-lain', 'netynet', 'Heresynetwork', 'SixOpen', 'tarob0ba', '3thn', 'digiplay', 'prithivMLmods', 'GPT007', 'Nymbo'], 'count': 25}, {'reaction': '🚀', 'users': ['victor', 'ngxson', 'Clausss', 'ZeroWw', 'xi0v', 'nold', 'yuriachermann', 'nroggendorff', 'InferenceIllusionist', 'sted97', 'danielus', 'not-lain', 'netynet', 'Heresynetwork', 'moock', '3thn', 'kramp', 'GPT007', 'nampdn-ai', 'IAmTheCollector'], 'count': 20}, {'reaction': '🤗', 'users': ['digiplay', 'louisbrulenaudet', 'GPT007', 'emirhanbilgic'], 'count': 4}, {'reaction': '👍', 'users': ['digiplay', 'GPT007'], 'count': 2}, {'reaction': '🤝', 'users': ['digiplay', 'GPT007'], 'count': 2}, {'reaction': '😎', 'users': ['GPT007'], 'count': 1}]",2024-07-01 12:40:49,2024-07-01 12:40:49.355,[],/posts/clem/215969412331682,5790,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e4318d616b09a31220980d6/24rMJ_vPh3gW9ZEmj64xr.png,3180.0,Manuel Romero,mrm8488,799935689571130,"[{'type': 'text', 'value': '🚨Exciting news for the Multilingual Synthetic Data Community!🚨', 'raw': '🚨Exciting news for the Multilingual Synthetic Data Community!🚨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I’ve taken inspiration from the MAGPIE paper on Llama-3-8B-instruct and extended its capabilities. Here’s what’s new!', 'raw': 'I’ve taken inspiration from the MAGPIE paper on Llama-3-8B-instruct and extended its capabilities. Here’s what’s new!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗞 The MAGPIE paper showcased that if you use the instruction-tuned version (', 'raw': '🗞 The MAGPIE paper showcased that if you use the instruction-tuned version ('}, {'type': 'inline_code', 'code': 'Llama-3-8B-instruct', 'raw': '`Llama-3-8B-instruct`'}, {'type': 'text', 'value': ') to generate synthetic instructions and then fine-tune the base version (', 'raw': ') to generate synthetic instructions and then fine-tune the base version ('}, {'type': 'inline_code', 'code': 'Llama-3-8B', 'raw': '`Llama-3-8B`'}, {'type': 'text', 'value': ') on this dataset, you can improve even the it-tuned version', 'raw': ') on this dataset, you can improve even the it-tuned version'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤔 While reading a script by Sebastian Raschka, PhD, I wondered: Could these advancements be replicated in other languages? Specifically, could they benefit non-English datasets?', 'raw': '🤔 While reading a script by Sebastian Raschka, PhD, I wondered: Could these advancements be replicated in other languages? Specifically, could they benefit non-English datasets?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🎉 And the answer is YES! At least for Spanish. I've successfully adapted the techniques for Spanish, proving the model's flexibility and multilingual capabilities."", 'raw': ""🎉 And the answer is YES! At least for Spanish. 
I've successfully adapted the techniques for Spanish, proving the model's flexibility and multilingual capabilities.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👩\u200d💻 To make this accessible, I created a basic script (heavily inspired by the Sebastian Raschka one) that allows you to generate similar datasets using ', 'raw': '👩\u200d💻 To make this accessible, I created a basic script (heavily inspired by the Sebastian Raschka one) that allows you to generate similar datasets using '}, {'type': 'inline_code', 'code': 'ollama', 'raw': '`ollama`'}, {'type': 'text', 'value': ' models (initially phi and llama3) automatically and upload it to the Hugging Face Hub!', 'raw': ' models (initially phi and llama3) automatically and upload it to the Hugging Face Hub!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[Script](', 'raw': '[Script]('}, {'type': 'link', 'href': 'https://gist.github.com/mrm8488/4650a5e3cc45523798a527a3446eb312', 'raw': 'https://gist.github.com/mrm8488/4650a5e3cc45523798a527a3446eb312'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Explore the datasets 📚 generated using our new script! ', 'raw': '🔍 Explore the datasets 📚 generated using our new script! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- [Llama-3-8B](', 'raw': '- [Llama-3-8B]('}, {'type': 'link', 'href': 'https://huggingface.co/datasets/mrm8488/dataset_llama3_5000_samples_es_4231_filtered', 'raw': 'https://huggingface.co/datasets/mrm8488/dataset_llama3_5000_samples_es_4231_filtered'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- [Phi-3-medium](', 'raw': '- [Phi-3-medium]('}, {'type': 'link', 'href': 'https://huggingface.co/datasets/mrm8488/dataset_phi3-medium_5000_samples_es_3906_filtered', 'raw': 'https://huggingface.co/datasets/mrm8488/dataset_phi3-medium_5000_samples_es_3906_filtered'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- [Phi-3-mini](', 'raw': '- [Phi-3-mini]('}, {'type': 'link', 'href': 'https://huggingface.co/datasets/mrm8488/dataset_phi3_5000_samples_es_3282_filtered', 'raw': 'https://huggingface.co/datasets/mrm8488/dataset_phi3_5000_samples_es_3282_filtered'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Note: These datasets have basic filtering. Apply additional quality filters before using them to fine-tune large language models.', 'raw': 'Note: These datasets have basic filtering. 
Apply additional quality filters before using them to fine-tune large language models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Inspiration and base script:', 'raw': 'Inspiration and base script:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/rasbt/LLMs-from-scratch/blob/main/ch07/05_dataset-generation/llama3-ollama.ipynb', 'raw': 'https://github.com/rasbt/LLMs-from-scratch/blob/main/ch07/05_dataset-generation/llama3-ollama.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.linkedin.com/feed/update/urn:li:activity:7210982019751661568/', 'raw': 'https://www.linkedin.com/feed/update/urn:li:activity:7210982019751661568/'}, {'type': 'new_line', 'raw': '\n'}]","🚨Exciting news for the Multilingual Synthetic Data Community!🚨 + +I’ve taken inspiration from the MAGPIE paper on Llama-3-8B-instruct and extended its capabilities. Here’s what’s new! + +🗞 The MAGPIE paper showcased that if you use the instruction-tuned version (`Llama-3-8B-instruct`) to generate synthetic instructions and then fine-tune the base version (`Llama-3-8B`) on this dataset, you can improve even the it-tuned version + +🤔 While reading a script by Sebastian Raschka, PhD, I wondered: Could these advancements be replicated in other languages? Specifically, could they benefit non-English datasets? + +🎉 And the answer is YES! At least for Spanish. I've successfully adapted the techniques for Spanish, proving the model's flexibility and multilingual capabilities. + +👩‍💻 To make this accessible, I created a basic script (heavily inspired by the Sebastian Raschka one) that allows you to generate similar datasets using `ollama` models (initially phi and llama3) automatically and upload it to the Hugging Face Hub! +[Script](https://gist.github.com/mrm8488/4650a5e3cc45523798a527a3446eb312) + + +🔍 Explore the datasets 📚 generated using our new script! + +- [Llama-3-8B](https://huggingface.co/datasets/mrm8488/dataset_llama3_5000_samples_es_4231_filtered) +- [Phi-3-medium](https://huggingface.co/datasets/mrm8488/dataset_phi3-medium_5000_samples_es_3906_filtered) +- [Phi-3-mini](https://huggingface.co/datasets/mrm8488/dataset_phi3_5000_samples_es_3282_filtered) + + +Note: These datasets have basic filtering. Apply additional quality filters before using them to fine-tune large language models. 
+ +Inspiration and base script: +https://github.com/rasbt/LLMs-from-scratch/blob/main/ch07/05_dataset-generation/llama3-ollama.ipynb +https://www.linkedin.com/feed/update/urn:li:activity:7210982019751661568/ +",[],[],"[{'reaction': '❤️', 'users': ['Davipar', 'osanseviero', 'apol', 'anakin87', 'davanstrien', 'victor', 'ucsahin', 'floschne', 'GPT007', 'vikas', 'Marvin73', 'pavaldeveloper', 'mrm8488', 'Taylor658', 'adorkin'], 'count': 15}]",2024-07-01 12:19:29,2024-11-09 16:20:26.480,"[{'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '5e4318d616b09a31220980d6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e4318d616b09a31220980d6/24rMJ_vPh3gW9ZEmj64xr.png', 'fullname': 'Manuel Romero', 'name': 'mrm8488', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3180, 'isFollowing': False}, {'_id': '635efe2b398ff343c4fa209b', 'avatarUrl': '/avatars/53ebfcab852efd849a848a26dc65751c.svg', 'fullname': 'elsatch', 'name': 'elsatch', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '60107b385ac3e86b3ea4fc34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg', 'fullname': 'Daniel van Strien', 'name': 'davanstrien', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 638, 'isFollowing': False}, {'_id': '5fc6879e1c5ee87b1164876d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5fc6879e1c5ee87b1164876d/Tjnm_lv0Bq0gPbFOTDH6E.jpeg', 'fullname': 'Huu Nguyen', 'name': 'huu-ontocord', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 49, 'isFollowing': False}]",/posts/mrm8488/799935689571130,6520,,7 +https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg,2185.0,Hafedh Hichri,not-lain,846357156531571,"[{'type': 'text', 'value': 'Hello beautiful people.', 'raw': 'Hello beautiful people.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I wanted to thank everyone that read my blogpost and I am glad to share that we have achieved 11000 readers 🥳', 'raw': 'I wanted to thank everyone that read my blogpost and I am glad to share that we have achieved 11000 readers 🥳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I couldn't have done this without you, so once again thanks a lot everyone for the support 💖"", 'raw': ""I couldn't have done this without you, so once again thanks a lot everyone for the support 💖""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you haven't already you can read my blog post at: "", 'raw': ""If you haven't already you can read my blog post at: ""}, {'type': 'link', 'href': 'https://huggingface.co/blog/not-lain/rag-chatbot-using-llama3', 'raw': 'https://huggingface.co/blog/not-lain/rag-chatbot-using-llama3'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Hello beautiful people. 
+I wanted to thank everyone that read my blogpost and I am glad to share that we have achieved 11000 readers 🥳 +I couldn't have done this without you, so once again thanks a lot everyone for the support 💖 +If you haven't already you can read my blog post at: https://huggingface.co/blog/not-lain/rag-chatbot-using-llama3 ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/DtpOWjMB9IlNK45cpB0VQ.png'}]",[],"[{'reaction': '❤️', 'users': ['YaTharThShaRma999', 'KvrParaskevi', 'ijohn07', 'mohamed-khalil'], 'count': 4}]",2024-06-25 17:14:13,2024-06-25 17:23:33.731,[],/posts/not-lain/846357156531571,1483,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/DxjZG1XT4H3ZHF7qHxWxk.jpeg,112.0,atayloraerospace,Taylor658,138732505334784,"[{'type': 'text', 'value': '🌍 Cohere for AI has announced that this July and August, it is inviting researchers from around the world to join Expedition Aya, a global initiative focused on launching projects using multilingual tools like Aya 23 and Aya 101. 🌐', 'raw': '🌍 Cohere for AI has announced that this July and August, it is inviting researchers from around the world to join Expedition Aya, a global initiative focused on launching projects using multilingual tools like Aya 23 and Aya 101. 🌐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Participants can start by joining the Aya server, where all organization will take place. They can share ideas and connect with others on Discord and the signup sheet. Various events will be hosted to help people find potential team members. 🤝', 'raw': 'Participants can start by joining the Aya server, where all organization will take place. They can share ideas and connect with others on Discord and the signup sheet. Various events will be hosted to help people find potential team members. 🤝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To support the projects, Cohere API credits will be issued. 💰', 'raw': 'To support the projects, Cohere API credits will be issued. 💰'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Over the course of six weeks, weekly check-in calls are also planned to help teams stay on track and receive support with using Aya. 🖥️', 'raw': 'Over the course of six weeks, weekly check-in calls are also planned to help teams stay on track and receive support with using Aya. 🖥️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The expedition will wrap up at the end of August with a closing event to showcase everyone’s work and plan next steps. Participants who complete the expedition will also receive some Expedition Aya swag. 🎉', 'raw': 'The expedition will wrap up at the end of August with a closing event to showcase everyone’s work and plan next steps. Participants who complete the expedition will also receive some Expedition Aya swag. 
🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Links:', 'raw': 'Links:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Join the Aya Discord: ', 'raw': 'Join the Aya Discord: '}, {'type': 'link', 'href': 'https://discord.com/invite/q9QRYkjpwk', 'raw': 'https://discord.com/invite/q9QRYkjpwk'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Visit the Expedition Aya Minisite: ', 'raw': 'Visit the Expedition Aya Minisite: '}, {'type': 'link', 'href': 'https://sites.google.com/cohere.com/expedition-aya/home', 'raw': 'https://sites.google.com/cohere.com/expedition-aya/home'}, {'type': 'new_line', 'raw': '\n'}]","🌍 Cohere for AI has announced that this July and August, it is inviting researchers from around the world to join Expedition Aya, a global initiative focused on launching projects using multilingual tools like Aya 23 and Aya 101. 🌐
+
+Participants can start by joining the Aya server, where all organization will take place. They can share ideas and connect with others on Discord and the signup sheet. Various events will be hosted to help people find potential team members. 🤝
+
+To support the projects, Cohere API credits will be issued. 💰
+
+Over the course of six weeks, weekly check-in calls are also planned to help teams stay on track and receive support with using Aya. 🖥️
+
+The expedition will wrap up at the end of August with a closing event to showcase everyone’s work and plan next steps. Participants who complete the expedition will also receive some Expedition Aya swag. 🎉
+
+Links:
+Join the Aya Discord: https://discord.com/invite/q9QRYkjpwk
+Visit the Expedition Aya Minisite: https://sites.google.com/cohere.com/expedition-aya/home
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/XsyWDzNV2D6zrhki1aMgo.jpeg'}]",[],"[{'reaction': '🚀', 'users': ['monsoon-nlp', 'louisbrulenaudet'], 'count': 2}, {'reaction': '🔥', 'users': ['takarajordan'], 'count': 1}]",2024-06-25 16:45:53,2024-07-09 10:32:25.131,"[{'_id': '6613f7ae43c4456e13ecbdcc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/aqVOJmgtsBbB6BFeLpL7h.jpeg', 'fullname': 'Jordan Legg', 'name': 'takarajordan', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 40, 'isFollowing': False}]",/posts/Taylor658/138732505334784,702,,1
+https://cdn-avatars.huggingface.co/v1/production/uploads/64dcd996e3e44e8000cdd9cb/jLrOsWrKrKYx0NhVkJaF0.jpeg,57.0,Umitcan Sahin,ucsahin,784239590755279,"[{'type': 'text', 'value': 'Florence-2 has a great capability of detecting various objects in a zero-shot setting with the task prompt ""<OD>"". However, if you want to detect specific objects that the base model is not able to in its current form, you can easily finetune it for this particular task. Below I show how to finetune the model to detect tables in a given image, but a similar process can be applied to detect any objects. Thanks to ', 'raw': 'Florence-2 has a great capability of detecting various objects in a zero-shot setting with the task prompt ""<OD>"". However, if you want to detect specific objects that the base model is not able to in its current form, you can easily finetune it for this particular task. Below I show how to finetune the model to detect tables in a given image, but a similar process can be applied to detect any objects. 
Thanks to '}, {'type': 'mention', 'user': 'andito', 'raw': '@andito'}, {'type': 'text', 'value': ', ', 'raw': ', '}, {'type': 'mention', 'user': 'merve', 'raw': '@merve'}, {'type': 'text', 'value': ', and ', 'raw': ', and '}, {'type': 'mention', 'user': 'SkalskiP', 'raw': '@SkalskiP'}, {'type': 'text', 'value': ' for sharing the fix for finetuning the Florence-2 model. Please also check their great blog post at ', 'raw': ' for sharing the fix for finetuning the Florence-2 model. Please also check their great blog post at '}, {'type': 'link', 'href': 'https://huggingface.co/blog/finetune-florence2', 'raw': 'https://huggingface.co/blog/finetune-florence2'}, {'type': 'text', 'value': '. ', 'raw': '. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Colab notebook: ', 'raw': 'Colab notebook: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1Y8GVjwzBIgfmfD3ZypDX5H1JA_VG0YDL?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1Y8GVjwzBIgfmfD3ZypDX5H1JA_VG0YDL?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Finetuned model: ', 'raw': 'Finetuned model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ucsahin/Florence-2-large-TableDetection'}, 'url': 'https://huggingface.co/ucsahin/Florence-2-large-TableDetection'}]","Florence-2 has a great capability of detecting various objects in a zero-shot setting with the task prompt ""<OD>"". However, if you want to detect specific objects that the base model is not able to in its current form, you can easily finetune it for this particular task. Below I show how to finetune the model to detect tables in a given image, but a similar process can be applied to detect any objects. Thanks to @andito, @merve, and @SkalskiP for sharing the fix for finetuning the Florence-2 model. Please also check their great blog post at https://huggingface.co/blog/finetune-florence2. 
+ +Colab notebook: https://colab.research.google.com/drive/1Y8GVjwzBIgfmfD3ZypDX5H1JA_VG0YDL?usp=sharing +Finetuned model: https://huggingface.co/ucsahin/Florence-2-large-TableDetection",[],"[{'_id': '65d66b494bbd0d92b641cdbb', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d66b494bbd0d92b641cdbb/6-7dm7B-JxcoS1QlCPdMN.jpeg', 'fullname': 'Andres Marafioti', 'name': 'andito', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 244}, {'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786}, {'_id': '60f84d4d85dbbb185d2e9a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60f84d4d85dbbb185d2e9a53/Mlc0XjAgQR2cuhGNchz07.jpeg', 'fullname': 'Piotr Skalski', 'name': 'SkalskiP', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3343}]","[{'reaction': '🚀', 'users': ['andito', 'qubvel-hf', 'John6666', 'Metin', 'osanseviero', 'sosoai', 'kevinjeswani', 'danelcsb', 'dblasko', 'maddosaientisuto', 'radames'], 'count': 11}, {'reaction': '🔥', 'users': ['alperiox', 'radames'], 'count': 2}]",2024-06-25 15:43:36,2025-03-06 10:06:47.385,"[{'_id': '6579e0eaa9e58aec614e9d97', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6579e0eaa9e58aec614e9d97/zklEVBvTRHoIvVjuVLRom.jpeg', 'fullname': 'Sangbum Choi', 'name': 'danelcsb', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6, 'isFollowing': False}, {'_id': '64dcd996e3e44e8000cdd9cb', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64dcd996e3e44e8000cdd9cb/jLrOsWrKrKYx0NhVkJaF0.jpeg', 'fullname': 'Umitcan Sahin', 'name': 'ucsahin', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 57, 'isFollowing': False}, {'_id': '625dc5dd943e346492b6cdb7', 'avatarUrl': '/avatars/ee643b69c74bd7ae5ad59df4d05ac38d.svg', 'fullname': 'Abdul Hanan Ch', 'name': 'maddosaientisuto', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '659e5506b03a0f0e4f648a2f', 'avatarUrl': '/avatars/fd2da0bbd9845ba9b2091b4ae24c83a6.svg', 'fullname': 'Chi Haozhuang', 'name': 'FisherZz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/ucsahin/784239590755279,4309,,6 +https://cdn-avatars.huggingface.co/v1/production/uploads/63f731c7d36951307fcca6bf/DMd5-Pt7YHC0agbAQ1xUc.png,193.0,Mitko Vasilev,mitkox,807406794863793,"[{'type': 'text', 'value': ""I'm decentralizing my AI end2end, from the AI model distribution to on device AI inferencing. llama-ipfs - llama.cpp integrated with Interplanetary File System for distributing peer2peer and loading AI models without the need for cloud storage or AI model Hub."", 'raw': ""I'm decentralizing my AI end2end, from the AI model distribution to on device AI inferencing. 
llama-ipfs - llama.cpp integrated with Interplanetary File System for distributing peer2peer and loading AI models without the need for cloud storage or AI model Hub.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'llama.cpp now supports decentralized inferencing with RPC, allowing the distribution of workload across all home devices. This functionality can be enhanced with a P2P ad-hoc VPN, enabling the extension of distributed inferencing to any device on any network.', 'raw': 'llama.cpp now supports decentralized inferencing with RPC, allowing the distribution of workload across all home devices. This functionality can be enhanced with a P2P ad-hoc VPN, enabling the extension of distributed inferencing to any device on any network.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Imagine an open-source AI that's as decentralized as a potluck dinner - everyone brings something to the table, and there's ZERO need for blockchain. It's like a digital fortress, with security and privacy baked right in, not to mention a dollop of integrity and trust. This could be the secret sauce for an enterprise AI platform, complete with an integrated IT policy. It might just be the cherry on top for the next generation of Apple Intelligence and Copilot+ PCs."", 'raw': ""Imagine an open-source AI that's as decentralized as a potluck dinner - everyone brings something to the table, and there's ZERO need for blockchain. It's like a digital fortress, with security and privacy baked right in, not to mention a dollop of integrity and trust. This could be the secret sauce for an enterprise AI platform, complete with an integrated IT policy. It might just be the cherry on top for the next generation of Apple Intelligence and Copilot+ PCs.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Make sure you own your AI. AI in the cloud is not aligned with you; it's aligned with the company that owns it."", 'raw': ""Make sure you own your AI. AI in the cloud is not aligned with you; it's aligned with the company that owns it.""}]","I'm decentralizing my AI end2end, from the AI model distribution to on device AI inferencing. llama-ipfs - llama.cpp integrated with Interplanetary File System for distributing peer2peer and loading AI models without the need for cloud storage or AI model Hub. + +llama.cpp now supports decentralized inferencing with RPC, allowing the distribution of workload across all home devices. This functionality can be enhanced with a P2P ad-hoc VPN, enabling the extension of distributed inferencing to any device on any network. + +Imagine an open-source AI that's as decentralized as a potluck dinner - everyone brings something to the table, and there's ZERO need for blockchain. It's like a digital fortress, with security and privacy baked right in, not to mention a dollop of integrity and trust. This could be the secret sauce for an enterprise AI platform, complete with an integrated IT policy. It might just be the cherry on top for the next generation of Apple Intelligence and Copilot+ PCs. + +Make sure you own your AI. 
AI in the cloud is not aligned with you; it's aligned with the company that owns it.",[],[],"[{'reaction': '🔥', 'users': ['rvpierre', 'Ramikan-BR', 'graphicaldot', 'brainhome', 'sa8', 'pegak'], 'count': 6}, {'reaction': '👍', 'users': ['Nerius'], 'count': 1}]",2024-06-25 14:10:12,2024-06-25 14:10:12.030,[],/posts/mitkox/807406794863793,2223,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64862a25cf5ad5e1f0482ef2/61qPUtw9jIl7zpPYmi0VW.jpeg,50.0,David Smooke,Smooke,697001195364732,"[{'type': 'text', 'value': 'NEW #DecentralizeAI Writing Contest, by InternetComputer.org and HackerNoon.com! 😜 ', 'raw': 'NEW #DecentralizeAI Writing Contest, by InternetComputer.org and HackerNoon.com! 😜 '}, {'type': 'link', 'href': 'https://www.contests.hackernoon.com/decentralize-ai-writing-contest', 'raw': 'https://www.contests.hackernoon.com/decentralize-ai-writing-contest'}, {'type': 'text', 'value': ' 🤪', 'raw': ' 🤪'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '""Not going to beat centralized AI with more centralized AI."" - Emad Mostaque', 'raw': '""Not going to beat centralized AI with more centralized AI."" - Emad Mostaque'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To enter, submit a blog post with the #decentralize-ai tag on HackerNoon.', 'raw': 'To enter, submit a blog post with the #decentralize-ai tag on HackerNoon.'}]","NEW #DecentralizeAI Writing Contest, by InternetComputer.org and HackerNoon.com! 😜 https://www.contests.hackernoon.com/decentralize-ai-writing-contest 🤪 + +""Not going to beat centralized AI with more centralized AI."" - Emad Mostaque + +To enter, submit a blog post with the #decentralize-ai tag on HackerNoon.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64862a25cf5ad5e1f0482ef2/exJ53dftLPQJw5m1f0JeA.png'}]",[],"[{'reaction': '🚀', 'users': ['victor'], 'count': 1}]",2024-06-25 13:25:30,2024-06-25 13:25:30.425,[],/posts/Smooke/697001195364732,508,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64cba00d710645aa7b04f281/a_-LPwd4wqRyi8sJ1QxjI.jpeg,75.0,Husnain,Niansuh,617599851628763,"[{'type': 'text', 'value': 'Use GPT-4o + GPT-4-Turbo-Preview + GPT-3.5-Turbo + BingAI', 'raw': 'Use GPT-4o + GPT-4-Turbo-Preview + GPT-3.5-Turbo + BingAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/NiansuhAI/Copilot', 'raw': 'https://huggingface.co/spaces/NiansuhAI/Copilot'}]","Use GPT-4o + GPT-4-Turbo-Preview + GPT-3.5-Turbo + BingAI + +https://huggingface.co/spaces/NiansuhAI/Copilot",[],[],"[{'reaction': '👍', 'users': ['Niansuh', 'British-Rat', 'John6666', 'ayush-thakur02'], 'count': 4}, {'reaction': '🚀', 'users': ['Niansuh', 'alvis44'], 'count': 2}]",2024-06-25 09:29:26,2024-06-26 15:50:27.001,"[{'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}, {'_id': '64cba00d710645aa7b04f281', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64cba00d710645aa7b04f281/a_-LPwd4wqRyi8sJ1QxjI.jpeg', 'fullname': 'Husnain', 'name': 'Niansuh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': 
False, 'followerCount': 75, 'isFollowing': False}]",/posts/Niansuh/617599851628763,1963,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1649681653581-5f7fbd813e94f16a85448745.jpeg,657.0,Sayak Paul,sayakpaul,504772386075668,"[{'type': 'text', 'value': 'Were you aware that we have a dedicated guide on different prompting mechanisms to improve the image generation quality? 🧨', 'raw': 'Were you aware that we have a dedicated guide on different prompting mechanisms to improve the image generation quality? 🧨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Takes you through simple prompt engineering, prompt weighting, prompt enhancement using GPT-2, and more.', 'raw': 'Takes you through simple prompt engineering, prompt weighting, prompt enhancement using GPT-2, and more.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the guide here 🦯', 'raw': 'Check out the guide here 🦯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/docs/diffusers/main/en/using-diffusers/weighted_prompts', 'raw': 'https://huggingface.co/docs/diffusers/main/en/using-diffusers/weighted_prompts'}]","Were you aware that we have a dedicated guide on different prompting mechanisms to improve the image generation quality? 🧨 + +Takes you through simple prompt engineering, prompt weighting, prompt enhancement using GPT-2, and more. + +Check out the guide here 🦯 +https://huggingface.co/docs/diffusers/main/en/using-diffusers/weighted_prompts","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f7fbd813e94f16a85448745/12dTczAyppx9K4if1B-0s.png'}]",[],"[{'reaction': '🔥', 'users': ['ashikurrahman', 'GPT007', 'linoyts', 'louisbrulenaudet', 'radames', 'chuangxinlezhi', 'mynkchaudhry'], 'count': 7}, {'reaction': '🤯', 'users': ['chuangxinlezhi'], 'count': 1}]",2024-06-25 09:08:27,2024-06-25 17:42:42.253,"[{'_id': '6640bbd0220cfa8cbfdce080', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6640bbd0220cfa8cbfdce080/wiAHUu5ewawyipNs0YFBR.png', 'fullname': 'John Smith', 'name': 'John6666', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1076, 'isFollowing': False}]",/posts/sayakpaul/504772386075668,2251,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/653cd3049107029eb004f968/Y4XphXmk8P51GlIi6u9cd.png,28.0,Rickard Edén,neph1,383558582332469,"[{'type': 'text', 'value': ""I've noticed some people are still downloading "", 'raw': ""I've noticed some people are still downloading ""}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'neph1/sd-seer-griffin-3b'}, 'url': 'https://huggingface.co/neph1/sd-seer-griffin-3b', 'raw': 'https://huggingface.co/neph1/sd-seer-griffin-3b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Should I make an update based on a more modern architecture? (griffin-3b is llama (1!))', 'raw': 'Should I make an update based on a more modern architecture? (griffin-3b is llama (1!))'}]","I've noticed some people are still downloading https://huggingface.co/neph1/sd-seer-griffin-3b +Should I make an update based on a more modern architecture? 
(griffin-3b is llama (1!))",[],[],[],2024-06-25 08:57:41,2024-07-22 12:15:14.331,"[{'_id': '66568552bfefce0a722916ff', 'avatarUrl': '/avatars/25883cb9e3e9411428a4b09a4e769fbb.svg', 'fullname': 'gfhgfhgfh', 'name': 'ffghgfh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/neph1/383558582332469,607,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/61b85aa99ba538c73a7dc78b/gWxtQAvOYn7cXgE_nAy0p.jpeg,35.0,Simone Tedeschi,sted97,676034649939962,"[{'type': 'text', 'value': '📢 Interested in #LLM safety? ', 'raw': '📢 Interested in #LLM safety? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We have just uploaded a new version of ALERT 🚨 on ArXiv with novel insights into the weaknesses and vulnerabilities of LLMs! 👀 ', 'raw': 'We have just uploaded a new version of ALERT 🚨 on ArXiv with novel insights into the weaknesses and vulnerabilities of LLMs! 👀 '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2404.08676', 'raw': 'https://arxiv.org/abs/2404.08676'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For a summary of the paper, read this blog post: ', 'raw': 'For a summary of the paper, read this blog post: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/sted97/alert', 'raw': 'https://huggingface.co/blog/sted97/alert'}, {'type': 'text', 'value': ' 🤗', 'raw': ' 🤗'}]","📢 Interested in #LLM safety? + +We have just uploaded a new version of ALERT 🚨 on ArXiv with novel insights into the weaknesses and vulnerabilities of LLMs! 👀 https://arxiv.org/abs/2404.08676 + +For a summary of the paper, read this blog post: https://huggingface.co/blog/sted97/alert 🤗",[],[],"[{'reaction': '👍', 'users': ['PereLluis13'], 'count': 1}]",2024-06-25 08:15:29,2024-06-25 08:15:29.270,[],/posts/sted97/676034649939962,487,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/656d73ed0bbc114fe6449704/gpteBU9GmKSHRVkRBUHld.png,34.0,Symbol-LLM,Symbol-LLM,355818467715515,"[{'type': 'text', 'value': '📣Thrilled to make public our recent work ENVISIONS !!!', 'raw': '📣Thrilled to make public our recent work ENVISIONS !!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Without human annotations !', 'raw': '- Without human annotations !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Without Distilling Strong LLMs !', 'raw': '- Without Distilling Strong LLMs !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Self-improve LLMs in the environment', 'raw': '- Self-improve LLMs in the environment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Amazing performances on agentic and reasoning tasks', 'raw': '- Amazing performances on agentic and reasoning tasks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Insightful analysis on ""why"" questions', 'raw': '- Insightful analysis on ""why"" questions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Title: Interactive Evolution: A Neural-Symbolic Self-Training Framework For Large Language Models', 'raw': '📝 Title: Interactive Evolution: A Neural-Symbolic Self-Training Framework For Large Language Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📎 Repo: ', 'raw': '📎 Repo: '}, {'type': 'link', 'href': 
'https://github.com/xufangzhi/ENVISIONS', 'raw': 'https://github.com/xufangzhi/ENVISIONS'}]","📣Thrilled to make public our recent work ENVISIONS !!! + +- Without human annotations ! +- Without Distilling Strong LLMs ! +- Self-improve LLMs in the environment +- Amazing performances on agentic and reasoning tasks +- Insightful analysis on ""why"" questions + +📝 Title: Interactive Evolution: A Neural-Symbolic Self-Training Framework For Large Language Models + +📎 Repo: https://github.com/xufangzhi/ENVISIONS",[],[],"[{'reaction': '🔥', 'users': ['Symbol-LLM', 'GPT007', 'John6666'], 'count': 3}, {'reaction': '🚀', 'users': ['Symbol-LLM'], 'count': 1}]",2024-06-25 05:17:34,2024-06-25 07:58:42.528,"[{'_id': '666abbd43263a8feca73e327', 'avatarUrl': '/avatars/c5dd35ecf6b895f01b20fba7aa75124d.svg', 'fullname': 'Jeffery', 'name': 'sjfhsajkf', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Symbol-LLM/355818467715515,1781,,1 +/avatars/7a79a12d9f67100b0ea2dfb19dc695d0.svg,6.0,Chen Hou,LegolasS,756619808319174,"[{'type': 'text', 'value': '🤯🤯🤯VERY ROBUST TOOL to control camera motion for videos!!!', 'raw': '🤯🤯🤯VERY ROBUST TOOL to control camera motion for videos!!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Even doesn't need any additional finetuning! It uses inference process of video diffusion directly!!"", 'raw': ""Even doesn't need any additional finetuning! It uses inference process of video diffusion directly!!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it on your own video diffusion model and generate CINEMATIC SHOTS!📸🎥\U0001fae2', 'raw': 'Try it on your own video diffusion model and generate CINEMATIC SHOTS!📸🎥\U0001fae2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check at ', 'raw': 'Check at '}, {'type': 'link', 'href': 'https://lifedecoder.github.io/CamTrol/', 'raw': 'https://lifedecoder.github.io/CamTrol/'}]","🤯🤯🤯VERY ROBUST TOOL to control camera motion for videos!!! + +Even doesn't need any additional finetuning! It uses inference process of video diffusion directly!! 
+ +Try it on your own video diffusion model and generate CINEMATIC SHOTS!📸🎥🫢 + +Check at https://lifedecoder.github.io/CamTrol/","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/660a367942025e0a8b8a2de5/zTAJay-sv15VbqYOQoAl8.qt'}]",[],"[{'reaction': '🤯', 'users': ['chchnii', 'victor', 'shadinpira80', 'KingNish', 'dillfrescott', 'netynet', 'LegolasS', 'Ramikan-BR', 'GPT007', 'PetrLe', 'akshatd001'], 'count': 11}, {'reaction': '🚀', 'users': ['chchnii', 'Taylor658', 'victor', 'rreed-pha', 'KingNish', 'LegolasS', 'Ramikan-BR', 'ucyang'], 'count': 8}, {'reaction': '🔥', 'users': ['chchnii', 'LegolasS', 'ibvhim', 'Ramikan-BR', 'PetrLe'], 'count': 5}, {'reaction': '👍', 'users': ['dillfrescott', 'Ramikan-BR', 'LegolasS', 'kramp', 'PetrLe'], 'count': 5}, {'reaction': '👀', 'users': ['chchnii', 'dillfrescott', 'Ramikan-BR'], 'count': 3}, {'reaction': '❤️', 'users': ['Ramikan-BR', 'shaising', 'PetrLe'], 'count': 3}, {'reaction': '🤗', 'users': ['LegolasS', 'PetrLe'], 'count': 2}]",2024-06-19 03:31:30,2025-04-14 06:00:46.689,"[{'_id': '6246908d8031dcfa9ef6d80b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6246908d8031dcfa9ef6d80b/hVdURjUl1RS2MZf4qOhvI.jpeg', 'fullname': 'Ahmad Mustafa Anis', 'name': 'AhmadMustafa', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10, 'isFollowing': False}]",/posts/LegolasS/756619808319174,4062,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/4i-p-ep1DIERsD8Wmgsco.png,22.0,NB,Skier8402,471891899352409,"[{'type': 'resource', 'resource': {'type': 'paper', 'id': '2312.16171'}, 'url': 'https://huggingface.co/papers/2312.16171', 'raw': 'https://huggingface.co/papers/2312.16171', 'label': 'Principled Instructions Are All You Need for Questioning LLaMA-1/2,\n GPT-3.5/4 (2312.16171)'}, {'type': 'text', 'value': "" I normally use this to make prompts in the form of a RAG (Retrieval Augmented Generation). For example, here's one from Gemma 7B about articles. "", 'raw': "" I normally use this to make prompts in the form of a RAG (Retrieval Augmented Generation). For example, here's one from Gemma 7B about articles. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '""Please summarize the main ideas of the article \'[Article Title]\' in a concise and informative manner. Focus on highlighting the key points and arguments presented in the article. Keep the summary to around [desired length] words.""', 'raw': '""Please summarize the main ideas of the article \'[Article Title]\' in a concise and informative manner. Focus on highlighting the key points and arguments presented in the article. Keep the summary to around [desired length] words.""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Has anyone else tried this? Do you like the results you are getting?', 'raw': 'Has anyone else tried this? Do you like the results you are getting?'}]","https://huggingface.co/papers/2312.16171 I normally use this to make prompts in the form of a RAG (Retrieval Augmented Generation). For example, here's one from Gemma 7B about articles. + +""Please summarize the main ideas of the article '[Article Title]' in a concise and informative manner. Focus on highlighting the key points and arguments presented in the article. Keep the summary to around [desired length] words."" + +Has anyone else tried this? 
Do you like the results you are getting?",[],[],[],2024-06-19 02:47:04,2024-06-19 02:47:04.374,[],/posts/Skier8402/471891899352409,899,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/60f2fc91b92afccb7c34b8ed/W2-Nay12Ef4Ltyaf8EKE9.jpeg,118.0,Gabriel Martín Blázquez,gabrielmbmb,632509301461247,"[{'type': 'text', 'value': '⚗️ distilabel 1.2.0 is out and it comes with improved support for structured generation, new tasks for generating datasets for training embedding models, new steps for loading data, MixtureOfAgentsLLM and improved docs.', 'raw': '⚗️ distilabel 1.2.0 is out and it comes with improved support for structured generation, new tasks for generating datasets for training embedding models, new steps for loading data, MixtureOfAgentsLLM and improved docs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We would love to see a few new datasets for training embedding models built with distilabel on the Hub! ❤️', 'raw': 'We would love to see a few new datasets for training embedding models built with distilabel on the Hub! ❤️'}]","⚗️ distilabel 1.2.0 is out and it comes with improved support for structured generation, new tasks for generating datasets for training embedding models, new steps for loading data, MixtureOfAgentsLLM and improved docs. + +We would love to see a few new datasets for training embedding models built with distilabel on the Hub! ❤️",[],[],"[{'reaction': '🔥', 'users': ['monsoon-nlp', 'clem', 'osanseviero', 'victor', 'abunchofrandomwords'], 'count': 5}]",2024-06-18 17:20:04,2024-06-18 17:20:04.566,[],/posts/gabrielmbmb/632509301461247,2508,,0 +/avatars/b9a6d8e11ec7a62ca2b819e0b6c37222.svg,2349.0,gokay aydogan,gokaygokay,847779810698714,"[{'type': 'text', 'value': ""I've fine-tuned three types of PaliGemma image captioner models for generating prompts for Text2Image models. They generate captions similar to prompts we give to the image generation models. I used google/docci and google/imageinwords datasets for fine-tuning. "", 'raw': ""I've fine-tuned three types of PaliGemma image captioner models for generating prompts for Text2Image models. They generate captions similar to prompts we give to the image generation models. I used google/docci and google/imageinwords datasets for fine-tuning. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This one gives you longer captions. ', 'raw': 'This one gives you longer captions. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'gokaygokay/SD3-Long-Captioner'}, 'url': 'https://huggingface.co/spaces/gokaygokay/SD3-Long-Captioner', 'raw': 'https://huggingface.co/spaces/gokaygokay/SD3-Long-Captioner'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This one gives you middle size captions. ', 'raw': 'This one gives you middle size captions. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/gokaygokay/SD3-Long-Captioner-V2', 'raw': 'https://huggingface.co/spaces/gokaygokay/SD3-Long-Captioner-V2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And this one gives you shorter captions. ', 'raw': 'And this one gives you shorter captions. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/gokaygokay/SDXL-Captioner', 'raw': 'https://huggingface.co/spaces/gokaygokay/SDXL-Captioner'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","I've fine-tuned three types of PaliGemma image captioner models for generating prompts for Text2Image models. They generate captions similar to prompts we give to the image generation models. I used google/docci and google/imageinwords datasets for fine-tuning. + +This one gives you longer captions. + +https://huggingface.co/spaces/gokaygokay/SD3-Long-Captioner + +This one gives you middle size captions. + +https://huggingface.co/spaces/gokaygokay/SD3-Long-Captioner-V2 + +And this one gives you shorter captions. + +https://huggingface.co/spaces/gokaygokay/SDXL-Captioner + +",[],[],"[{'reaction': '❤️', 'users': ['clem', 'osanseviero', 'victor', 'OjciecTadeusz', 'emanuelevivoli', 'Warlord-K', 'John6666', 'pcuenq', 'FiditeNemini', 'AmmarNTNU12'], 'count': 10}, {'reaction': '🔥', 'users': ['gabrielchua', 'AIGCalien', 'John6666', 'pcuenq'], 'count': 4}, {'reaction': '👍', 'users': ['Norod78', 'John6666'], 'count': 2}, {'reaction': '🚀', 'users': ['prithivMLmods', 'John6666'], 'count': 2}]",2024-06-18 16:13:54,2024-10-10 09:47:55.536,"[{'_id': '65fe29c741a11343541113b9', 'avatarUrl': '/avatars/2079d5aa1e9bb5091fffe95468162aec.svg', 'fullname': 'NULL', 'name': 'ljnlonoljpiljm', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 17, 'isFollowing': False}, {'_id': '630899601dd1e3075d975785', 'avatarUrl': '/avatars/b9a6d8e11ec7a62ca2b819e0b6c37222.svg', 'fullname': 'gokay aydogan', 'name': 'gokaygokay', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2349, 'isFollowing': False}, {'_id': '62f8ca074588fe31f4361dae', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62f8ca074588fe31f4361dae/F2k343TPD7KVfW3P26IRs.jpeg', 'fullname': 'Yatharth Gupta', 'name': 'Warlord-K', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 38, 'isFollowing': False}, {'_id': '667ade8339d627f679b39a5a', 'avatarUrl': '/avatars/751ab4b02b469b502d9a994d55d4df8a.svg', 'fullname': 'Gomez', 'name': 'raulgb', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/gokaygokay/847779810698714,5993,,10 +https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/DxjZG1XT4H3ZHF7qHxWxk.jpeg,112.0,atayloraerospace,Taylor658,808154088391506,"[{'type': 'text', 'value': 'With the CVPR conference (', 'raw': 'With the CVPR conference ('}, {'type': 'link', 'href': 'https://cvpr.thecvf.com', 'raw': 'https://cvpr.thecvf.com'}, {'type': 'text', 'value': ') in full swing this week in Seattle 🏙️, the competition details for NeurIPS 2024 have just been released.🚀', 'raw': ') in full swing this week in Seattle 🏙️, the competition details for NeurIPS 2024 have just been released.🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Some of the competitions this year include:', 'raw': 'Some of the competitions this year include:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦾 MyoChallenge 2024: Physiological dexterity in bionic 
humans.', 'raw': '🦾 MyoChallenge 2024: Physiological dexterity in bionic humans.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌌 FAIR Universe: Handling uncertainties in fundamental science.', 'raw': '🌌 FAIR Universe: Handling uncertainties in fundamental science.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧪 BELKA: Chemical assessment through big encoded libraries.', 'raw': '🧪 BELKA: Chemical assessment through big encoded libraries.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏆 HAC: Hacker-Cup AI competition.', 'raw': '🏆 HAC: Hacker-Cup AI competition.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💰 Large-Scale Auction Challenge: Decision-making in competitive games.', 'raw': '💰 Large-Scale Auction Challenge: Decision-making in competitive games.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📶 URGENT Challenge: Signal reconstruction and enhancement.', 'raw': '📶 URGENT Challenge: Signal reconstruction and enhancement.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛡️ LASC 2024: Safety in LLM and AI agents.', 'raw': '🛡️ LASC 2024: Safety in LLM and AI agents.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For more details, check out: ', 'raw': 'For more details, check out: '}, {'type': 'link', 'href': 'https://blog.neurips.cc/2024/06/04/neurips-2024-competitions-announced', 'raw': 'https://blog.neurips.cc/2024/06/04/neurips-2024-competitions-announced'}]","With the CVPR conference (https://cvpr.thecvf.com) in full swing this week in Seattle 🏙️, the competition details for NeurIPS 2024 have just been released.🚀 + +Some of the competitions this year include: + +🦾 MyoChallenge 2024: Physiological dexterity in bionic humans. +🌌 FAIR Universe: Handling uncertainties in fundamental science. +🧪 BELKA: Chemical assessment through big encoded libraries. +🏆 HAC: Hacker-Cup AI competition. +💰 Large-Scale Auction Challenge: Decision-making in competitive games. +📶 URGENT Challenge: Signal reconstruction and enhancement. +🛡️ LASC 2024: Safety in LLM and AI agents. + +For more details, check out: https://blog.neurips.cc/2024/06/04/neurips-2024-competitions-announced",[],[],"[{'reaction': '❤️', 'users': ['dai-tokyo'], 'count': 1}]",2024-06-18 15:53:14,2024-06-18 15:53:14.239,[],/posts/Taylor658/808154088391506,830,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,363479369380557,"[{'type': 'text', 'value': 'Impressive to see Depth Anything V2. See this example I just took with a lot of different depths. ', 'raw': 'Impressive to see Depth Anything V2. See this example I just took with a lot of different depths. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you want to learn more about it, this TLDR by ', 'raw': 'If you want to learn more about it, this TLDR by '}, {'type': 'mention', 'user': 'merve', 'raw': '@merve'}, {'type': 'text', 'value': ' is👌 ', 'raw': ' is👌 '}, {'type': 'link', 'href': 'https://huggingface.co/posts/merve/568638914646708', 'raw': 'https://huggingface.co/posts/merve/568638914646708'}]","Impressive to see Depth Anything V2. See this example I just took with a lot of different depths. 
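To reproduce something similar on your own photo, here is a minimal sketch using the transformers depth-estimation pipeline (the model id below assumes the small Depth Anything V2 checkpoint; swap in another size if you prefer):

```
# Minimal sketch: monocular depth estimation via the transformers pipeline.
# The checkpoint id is assumed to be the small V2 release on the Hub.
from transformers import pipeline
from PIL import Image

depth_estimator = pipeline('depth-estimation', model='depth-anything/Depth-Anything-V2-Small-hf')
image = Image.open('photo.jpg')  # any RGB photo you took
result = depth_estimator(image)
result['depth'].save('depth_map.png')  # 'depth' is a PIL image of the predicted map
```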
+ +If you want to learn more about it, this TLDR by @merve is👌 https://huggingface.co/posts/merve/568638914646708","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/95iaI35bZQ72snHNRJm3j.png'}]","[{'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786}]",[],2024-06-18 15:21:14,2024-06-18 15:21:14.963,[],/posts/fdaudens/363479369380557,557,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,568638914646708,"[{'type': 'text', 'value': 'I love Depth Anything V2 😍 ', 'raw': 'I love Depth Anything V2 😍 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It’s Depth Anything, but scaled with both larger teacher model and a gigantic dataset! ', 'raw': 'It’s Depth Anything, but scaled with both larger teacher model and a gigantic dataset! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's a small TLDR of paper with a lot of findings, experiments and more. "", 'raw': ""Here's a small TLDR of paper with a lot of findings, experiments and more. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have also created a collection that has the models, the dataset, the demo and CoreML converted model 😚 ', 'raw': 'I have also created a collection that has the models, the dataset, the demo and CoreML converted model 😚 '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/depth-anything-v2-release-6671902e798cd404513ffbf5'}, 'url': 'https://huggingface.co/collections/merve/depth-anything-v2-release-6671902e798cd404513ffbf5', 'raw': 'https://huggingface.co/collections/merve/depth-anything-v2-release-6671902e798cd404513ffbf5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The authors have analyzed Marigold, a diffusion based model against Depth Anything and found out what’s up with using synthetic images vs real images for MDE:', 'raw': 'The authors have analyzed Marigold, a diffusion based model against Depth Anything and found out what’s up with using synthetic images vs real images for MDE:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔖 Real data has a lot of label noise, inaccurate depth maps (caused by depth sensors missing transparent objects etc) and there are many details overlooked ', 'raw': '🔖 Real data has a lot of label noise, inaccurate depth maps (caused by depth sensors missing transparent objects etc) and there are many details overlooked '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔖 Synthetic data have more precise and detailed depth labels and they are truly ground-truth, but there’s a distribution shift between real and synthetic images, and they have restricted scene coverage', 'raw': '🔖 Synthetic data have more precise and detailed depth labels and they are truly ground-truth, but there’s a distribution shift between real and synthetic images, and they have restricted scene coverage'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The authors train 
different image encoders only on synthetic images and find out unless the encoder is very large the model can’t generalize well (but large models generalize inherently anyway) 🧐', 'raw': 'The authors train different image encoders only on synthetic images and find out unless the encoder is very large the model can’t generalize well (but large models generalize inherently anyway) 🧐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But they still fail encountering real images that have wide distribution in labels (e.g. diverse instances of objects) 🥲', 'raw': 'But they still fail encountering real images that have wide distribution in labels (e.g. diverse instances of objects) 🥲'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Depth Anything v2 framework is to..', 'raw': 'Depth Anything v2 framework is to..'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦖 Train a teacher model based on DINOv2-G based on 595K synthetic images', 'raw': '🦖 Train a teacher model based on DINOv2-G based on 595K synthetic images'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏷️ Label 62M real images using teacher model', 'raw': '🏷️ Label 62M real images using teacher model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦕 Train a student model using the real images labelled by teacher ', 'raw': '🦕 Train a student model using the real images labelled by teacher '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Result: 10x faster and more accurate than Marigold! ', 'raw': 'Result: 10x faster and more accurate than Marigold! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The authors also construct a new benchmark called DA-2K that is less noisy, highly detailed and more diverse! ', 'raw': 'The authors also construct a new benchmark called DA-2K that is less noisy, highly detailed and more diverse! '}, {'type': 'new_line', 'raw': '\n'}]","I love Depth Anything V2 😍 +It’s Depth Anything, but scaled with both larger teacher model and a gigantic dataset! + +Here's a small TLDR of paper with a lot of findings, experiments and more. +I have also created a collection that has the models, the dataset, the demo and CoreML converted model 😚 https://huggingface.co/collections/merve/depth-anything-v2-release-6671902e798cd404513ffbf5 + +The authors have analyzed Marigold, a diffusion based model against Depth Anything and found out what’s up with using synthetic images vs real images for MDE: + +🔖 Real data has a lot of label noise, inaccurate depth maps (caused by depth sensors missing transparent objects etc) and there are many details overlooked + +🔖 Synthetic data have more precise and detailed depth labels and they are truly ground-truth, but there’s a distribution shift between real and synthetic images, and they have restricted scene coverage + +The authors train different image encoders only on synthetic images and find out unless the encoder is very large the model can’t generalize well (but large models generalize inherently anyway) 🧐 +But they still fail encountering real images that have wide distribution in labels (e.g. diverse instances of objects) 🥲 + +Depth Anything v2 framework is to.. 
+ +🦖 Train a teacher model based on DINOv2-G based on 595K synthetic images +🏷️ Label 62M real images using teacher model +🦕 Train a student model using the real images labelled by teacher +Result: 10x faster and more accurate than Marigold! + +The authors also construct a new benchmark called DA-2K that is less noisy, highly detailed and more diverse! +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/UbIwAPTcbmYYkZidLXxJF.png'}]",[],"[{'reaction': '🔥', 'users': ['osanseviero', 'Ramikan-BR', 'fynnkroeger', 'SixOpen', 'kingabzpro', 'Taylor658', 'clem', 'Hev832', 'shadinpira80', 'xu3kev', 'radames', 'louisbrulenaudet'], 'count': 12}, {'reaction': '🚀', 'users': ['Ramikan-BR', 'Tuana', 'kramp', 'clem', 'super-cinnamon'], 'count': 5}, {'reaction': '❤️', 'users': ['Ramikan-BR', 'clem', 'melmass', 'shaising', 'radames'], 'count': 5}, {'reaction': '👀', 'users': ['Ramikan-BR', 'clem'], 'count': 2}, {'reaction': '🤗', 'users': ['kingabzpro', 'clem'], 'count': 2}]",2024-06-18 13:59:59,2024-06-18 13:59:59.663,[],/posts/merve/568638914646708,4235,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/635dd6cd4fabde0df74aeae6/23c0uEOr7RWDtSLDBzkPD.png,573.0,araminta_k,alvdansen,999224288136633,"[{'type': 'text', 'value': 'Hey All!', 'raw': 'Hey All!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've been asked a lot to share more on how I train LoRAs. The truth is I don't think my advice is very helpful without also including more contextual, theoretical commentary on how I **think** about training LoRAs for SDXL and other models. "", 'raw': ""I've been asked a lot to share more on how I train LoRAs. The truth is I don't think my advice is very helpful without also including more contextual, theoretical commentary on how I **think** about training LoRAs for SDXL and other models. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I wrote a first article here about it - let me know what you think.', 'raw': 'I wrote a first article here about it - let me know what you think.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/alvdansen/thoughts-on-lora-training-1', 'raw': 'https://huggingface.co/blog/alvdansen/thoughts-on-lora-training-1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edit: Also people kept asking where to start so I made a list of possible resources:', 'raw': 'Edit: Also people kept asking where to start so I made a list of possible resources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/blog/alvdansen/thoughts-on-lora-training-pt-2-training-services', 'raw': 'https://huggingface.co/blog/alvdansen/thoughts-on-lora-training-pt-2-training-services'}]","Hey All! + +I've been asked a lot to share more on how I train LoRAs. The truth is I don't think my advice is very helpful without also including more contextual, theoretical commentary on how I **think** about training LoRAs for SDXL and other models. + +I wrote a first article here about it - let me know what you think. 
+ +https://huggingface.co/blog/alvdansen/thoughts-on-lora-training-1 + +Edit: Also people kept asking where to start so I made a list of possible resources: +https://huggingface.co/blog/alvdansen/thoughts-on-lora-training-pt-2-training-services",[],[],"[{'reaction': '❤️', 'users': ['victor', 'shaising', 'merve', 'not-lain', 'netynet', 'dillfrescott', 'KingNish', 'philipp-zettl', 'kramp'], 'count': 9}, {'reaction': '👍', 'users': ['digiplay', 'victor', 'merve', 'not-lain', 'dillfrescott'], 'count': 5}]",2024-06-18 13:38:30,2024-06-27 03:50:40.376,"[{'_id': '65fb3921c651da86eeea4890', 'avatarUrl': '/avatars/8473d30b909208e7dd5828620bcb4ce1.svg', 'fullname': 'Wallow', 'name': 'Viktor1233', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '635dd6cd4fabde0df74aeae6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/635dd6cd4fabde0df74aeae6/23c0uEOr7RWDtSLDBzkPD.png', 'fullname': 'araminta_k', 'name': 'alvdansen', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 573, 'isFollowing': False}, {'_id': '62a9afdd0472c0b7f94c491c', 'avatarUrl': '/avatars/7ca2d750fb67cc848dc07a9161bfa9dd.svg', 'fullname': 'Martin Viewegger', 'name': 'Viewegger', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '6406a284a577649430c5bfdd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6406a284a577649430c5bfdd/6mGZ3EOvkWWvUGVv04Tm9.jpeg', 'fullname': 'Fihade', 'name': 'Fihade', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/alvdansen/999224288136633,3255,,13 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,948457903511815,"[{'type': 'text', 'value': '💰 𝗚𝗲𝘁 𝘁𝗵𝗲 𝗽𝗿𝗶𝗰𝗲 𝗼𝗳 𝗮𝗻𝘆 𝗟𝗟𝗠 𝗔𝗣𝗜 𝗿𝗲𝗾𝘂𝗲𝘀𝘁 ⇒ 𝘁𝗼𝗸𝗲𝗻𝗰𝗼𝘀𝘁', 'raw': '💰 𝗚𝗲𝘁 𝘁𝗵𝗲 𝗽𝗿𝗶𝗰𝗲 𝗼𝗳 𝗮𝗻𝘆 𝗟𝗟𝗠 𝗔𝗣𝗜 𝗿𝗲𝗾𝘂𝗲𝘀𝘁 ⇒ 𝘁𝗼𝗸𝗲𝗻𝗰𝗼𝘀𝘁'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've just found out about 𝙰𝚐𝚎𝚗𝚝𝙾𝚙𝚜-𝙰𝙸/𝚝𝚘𝚔𝚎𝚗𝚌𝚘𝚜𝚝 ("", 'raw': ""I've just found out about 𝙰𝚐𝚎𝚗𝚝𝙾𝚙𝚜-𝙰𝙸/𝚝𝚘𝚔𝚎𝚗𝚌𝚘𝚜𝚝 (""}, {'type': 'link', 'href': 'https://github.com/AgentOps-AI/tokencost', 'raw': 'https://github.com/AgentOps-AI/tokencost'}, {'type': 'text', 'value': ').', 'raw': ').'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗧𝗵𝗶𝘀 𝗹𝗶𝗯𝗿𝗮𝗿𝘆 𝗴𝗶𝘃𝗲𝘀 𝘆𝗼𝘂 𝘁𝗵𝗲 𝗽𝗿𝗶𝗰𝗲 𝗼𝗳 𝘆𝗼𝘂𝗿 𝗰𝗮𝗹𝗹𝘀 𝘁𝗼 𝗮𝗻𝘆 𝗟𝗟𝗠 𝗔𝗣𝗜: OpenAI, Anthropic, Mistral, AWS or Databricks...', 'raw': '𝗧𝗵𝗶𝘀 𝗹𝗶𝗯𝗿𝗮𝗿𝘆 𝗴𝗶𝘃𝗲𝘀 𝘆𝗼𝘂 𝘁𝗵𝗲 𝗽𝗿𝗶𝗰𝗲 𝗼𝗳 𝘆𝗼𝘂𝗿 𝗰𝗮𝗹𝗹𝘀 𝘁𝗼 𝗮𝗻𝘆 𝗟𝗟𝗠 𝗔𝗣𝗜: OpenAI, Anthropic, Mistral, AWS or Databricks...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For any model, you can use as input either string prompts or messages, and get as outputs either the price or token count.', 'raw': 'For any model, you can use as input either string prompts or messages, and get as outputs either the price or token count.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Congrats to the AgentOps-AI team: this will be very useful when trying to get a ballpark estimate of a project's price, to compare APIs, or for precise monitoring of usage!"", 'raw': ""Congrats to the AgentOps-AI team: this will be very useful when trying to get a ballpark estimate of 
a project's price, to compare APIs, or for precise monitoring of usage!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Daily reminder: 𝗿𝘂𝗻𝗻𝗶𝗻𝗴 𝗮𝗻 𝗔𝟭𝟬𝟬 𝗰𝗼𝘀𝘁𝘀 𝘆𝗼𝘂 𝗲𝘅𝗮𝗰𝘁𝗹𝘆 $𝟬.𝟬𝟬/𝗵𝗼𝘂𝗿 (or 0.00€ in current exchange rates) on a HF space with ZeroGPU!', 'raw': '✨ Daily reminder: 𝗿𝘂𝗻𝗻𝗶𝗻𝗴 𝗮𝗻 𝗔𝟭𝟬𝟬 𝗰𝗼𝘀𝘁𝘀 𝘆𝗼𝘂 𝗲𝘅𝗮𝗰𝘁𝗹𝘆 $𝟬.𝟬𝟬/𝗵𝗼𝘂𝗿 (or 0.00€ in current exchange rates) on a HF space with ZeroGPU!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Learn more on ZeroGPU 👉 ', 'raw': 'Learn more on ZeroGPU 👉 '}, {'type': 'link', 'href': 'https://www.datacenterdynamics.com/en/news/hugging-face-launches-zerogpu-project-to-democratize-ai-gives-away-10-million-worth-of-compute/', 'raw': 'https://www.datacenterdynamics.com/en/news/hugging-face-launches-zerogpu-project-to-democratize-ai-gives-away-10-million-worth-of-compute/'}]","💰 𝗚𝗲𝘁 𝘁𝗵𝗲 𝗽𝗿𝗶𝗰𝗲 𝗼𝗳 𝗮𝗻𝘆 𝗟𝗟𝗠 𝗔𝗣𝗜 𝗿𝗲𝗾𝘂𝗲𝘀𝘁 ⇒ 𝘁𝗼𝗸𝗲𝗻𝗰𝗼𝘀𝘁 + +I've just found out about 𝙰𝚐𝚎𝚗𝚝𝙾𝚙𝚜-𝙰𝙸/𝚝𝚘𝚔𝚎𝚗𝚌𝚘𝚜𝚝 (https://github.com/AgentOps-AI/tokencost). +𝗧𝗵𝗶𝘀 𝗹𝗶𝗯𝗿𝗮𝗿𝘆 𝗴𝗶𝘃𝗲𝘀 𝘆𝗼𝘂 𝘁𝗵𝗲 𝗽𝗿𝗶𝗰𝗲 𝗼𝗳 𝘆𝗼𝘂𝗿 𝗰𝗮𝗹𝗹𝘀 𝘁𝗼 𝗮𝗻𝘆 𝗟𝗟𝗠 𝗔𝗣𝗜: OpenAI, Anthropic, Mistral, AWS or Databricks... + +For any model, you can use as input either string prompts or messages, and get as outputs either the price or token count. + +Congrats to the AgentOps-AI team: this will be very useful when trying to get a ballpark estimate of a project's price, to compare APIs, or for precise monitoring of usage! + +✨ Daily reminder: 𝗿𝘂𝗻𝗻𝗶𝗻𝗴 𝗮𝗻 𝗔𝟭𝟬𝟬 𝗰𝗼𝘀𝘁𝘀 𝘆𝗼𝘂 𝗲𝘅𝗮𝗰𝘁𝗹𝘆 $𝟬.𝟬𝟬/𝗵𝗼𝘂𝗿 (or 0.00€ in current exchange rates) on a HF space with ZeroGPU! +Learn more on ZeroGPU 👉 https://www.datacenterdynamics.com/en/news/hugging-face-launches-zerogpu-project-to-democratize-ai-gives-away-10-million-worth-of-compute/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/S0GpNkE2ZTHNMMi4jNgyv.png'}]",[],"[{'reaction': '👍', 'users': ['umair894', 'Pavarissy', 'kramp', 'victor', 'gabrielmbmb', 'Taylor658', 'sigridjineth', 'maywell', 'radames', 'sosoai', 'prithivMLmods', 'introvoyz041'], 'count': 12}, {'reaction': '😎', 'users': ['GPT007'], 'count': 1}, {'reaction': '🔥', 'users': ['Vanos007'], 'count': 1}]",2024-06-18 13:08:27,2024-09-11 09:57:59.491,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '63d10d4e8eaa4831005e92b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg', 'fullname': 'Aymeric Roucher', 'name': 'm-ric', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1652, 'isFollowing': False}]",/posts/m-ric/948457903511815,3143,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/664b25abfebdc298f724bdf0/pPb3xhDAOV_aLS642kPIW.png,8.0,Boris Orekhov,nevmenandr,853450311709108,"[{'type': 'resource', 'resource': {'type': 'dataset', 'id': 'nevmenandr/incoming-students-ma-dh-hse-university'}, 
'url': 'https://huggingface.co/datasets/nevmenandr/incoming-students-ma-dh-hse-university', 'raw': 'https://huggingface.co/datasets/nevmenandr/incoming-students-ma-dh-hse-university'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset visualized by datawrapper', 'raw': 'Dataset visualized by datawrapper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'html', 'code': '
', 'raw': '```html\n
\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Data from my talk in February: ', 'raw': 'Data from my talk in February: '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=ZfXqvIzl5fo', 'raw': 'https://www.youtube.com/watch?v=ZfXqvIzl5fo'}, {'type': 'text', 'value': ' . Slides: ', 'raw': ' . Slides: '}, {'type': 'link', 'href': 'https://nevmenandr.github.io/slides/2024-02-02/slides.pdf', 'raw': 'https://nevmenandr.github.io/slides/2024-02-02/slides.pdf'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}]","https://huggingface.co/datasets/nevmenandr/incoming-students-ma-dh-hse-university +Dataset visualized by datawrapper + +```html +
+``` +Data from my talk in February: https://www.youtube.com/watch?v=ZfXqvIzl5fo . Slides: https://nevmenandr.github.io/slides/2024-02-02/slides.pdf. +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/664b25abfebdc298f724bdf0/SdpAVyGxnkZrd9SSuc9yL.png'}]",[],[],2024-06-18 12:38:10,2024-06-18 12:47:34.092,[],/posts/nevmenandr/853450311709108,591,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,146689464936084,"[{'type': 'text', 'value': 'New Appearance from Ollama Open WebUI!', 'raw': 'New Appearance from Ollama Open WebUI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And Also web search, Realtime talking and File RAG!', 'raw': 'And Also web search, Realtime talking and File RAG!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://chemllm.org/', 'raw': 'https://chemllm.org/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","New Appearance from Ollama Open WebUI! +And Also web search, Realtime talking and File RAG! +https://chemllm.org/ + + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64bce15bafd1e46c5504ad38/jqmfsSZs1sn3Rp5Ll8_Zn.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64bce15bafd1e46c5504ad38/i9y-iLZoUWs8UoWqxdfXs.png'}]",[],"[{'reaction': '😎', 'users': ['louisbrulenaudet'], 'count': 1}]",2024-06-12 15:19:50,2024-06-12 15:43:57.096,[],/posts/di-zhang-fdu/146689464936084,1006,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/657eb5b256c9c67605a6e8b5/RPblnGJX57oiIcASEz_S8.png,37.0,raincandy_U,raincandy-u,751393350745146,"[{'type': 'text', 'value': '🤗 I trained what is probably the smallest (600k ~) TinyStories model! It really can write grammatically correct stories!', 'raw': '🤗 I trained what is probably the smallest (600k ~) TinyStories model! 
It really can write grammatically correct stories!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'raincandy-u/TinyStories-656K'}, 'url': 'https://huggingface.co/raincandy-u/TinyStories-656K', 'raw': 'https://huggingface.co/raincandy-u/TinyStories-656K'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try this space based on this minuscule model!', 'raw': 'Try this space based on this minuscule model!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'raincandy-u/Story-Teller'}, 'url': 'https://huggingface.co/spaces/raincandy-u/Story-Teller', 'raw': 'https://huggingface.co/spaces/raincandy-u/Story-Teller'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edit: Moreover, the model weight size is only 1.31MB under bf16, and can be reduced to the 700KB level when using Q8_0 quantization U•ェ•*U', 'raw': 'Edit: Moreover, the model weight size is only 1.31MB under bf16, and can be reduced to the 700KB level when using Q8_0 quantization U•ェ•*U'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edit: Now 1000K params chat model!', 'raw': 'Edit: Now 1000K params chat model!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'raincandy-u/TinyChat-1776K'}, 'url': 'https://huggingface.co/raincandy-u/TinyChat-1776K', 'raw': 'https://huggingface.co/raincandy-u/TinyChat-1776K'}]","🤗 I trained what is probably the smallest (600k ~) TinyStories model! It really can write grammatically correct stories! + +https://huggingface.co/raincandy-u/TinyStories-656K + +Try this space based on this minuscule model! + +https://huggingface.co/spaces/raincandy-u/Story-Teller + +Edit: Moreover, the model weight size is only 1.31MB under bf16, and can be reduced to the 700KB level when using Q8_0 quantization U•ェ•*U + +Edit: Now 1000K params chat model! 
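If you want to smoke-test the checkpoints, here is a minimal sketch, assuming the weights load as a standard causal LM through transformers (the prompt and sampling settings are just my guesses, not tuned values):

```
# Minimal sketch: generate a story with the tiny model via transformers.
# Sampling settings below are illustrative guesses, not tuned values.
from transformers import pipeline

generator = pipeline('text-generation', model='raincandy-u/TinyStories-656K')
out = generator('Once upon a time', max_new_tokens=80, do_sample=True, temperature=0.7)
print(out[0]['generated_text'])
```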
+ +https://huggingface.co/raincandy-u/TinyChat-1776K",[],[],"[{'reaction': '🚀', 'users': ['raincandy-u', 'victor', 'Clausss'], 'count': 3}, {'reaction': '🔥', 'users': ['victor', 'raincandy-u', 'Clausss'], 'count': 3}, {'reaction': '👀', 'users': ['Clausss', 'raincandy-u'], 'count': 2}, {'reaction': '🤯', 'users': ['raincandy-u', 'Ballzlver49'], 'count': 2}]",2024-06-12 12:19:13,2024-06-13 01:10:38.214,"[{'_id': '638a1f3258afc73b9c64a761', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/638a1f3258afc73b9c64a761/A6lqaOhd6Rt4bdIkdYOB6.png', 'fullname': 'Shaikat', 'name': 'shaikatasif', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 17, 'isFollowing': False}, {'_id': '657eb5b256c9c67605a6e8b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/657eb5b256c9c67605a6e8b5/RPblnGJX57oiIcASEz_S8.png', 'fullname': 'raincandy_U', 'name': 'raincandy-u', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 37, 'isFollowing': False}]",/posts/raincandy-u/751393350745146,2483,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/630f3058236215d0b7078806/TRTdqAZpT1bJg_RvGgxlg.jpeg,29.0,Tarun Jain,lucifertrj,230575412081053,"[{'type': 'text', 'value': 'Advanced RAG - Hybrid Search using HuggingFace Models', 'raw': 'Advanced RAG - Hybrid Search using HuggingFace Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Chat with PDF in 10 lines of code:', 'raw': 'Chat with PDF in 10 lines of code:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': '# pip install beyondllm\n# pip install llama-index-embeddings-fastembed\n\nfrom beyondllm import source,retrieve,embeddings,llms,generator\nimport os\nfrom getpass import getpass\nos.environ[\'HUGGINGFACE_ACCESS_TOKEN\'] = getpass(""Enter your HF API token:"")\n\ndata = source.fit(""sample.pdf"", dtype=""pdf"")\nembed_model = embeddings.FastEmbedEmbeddings()\n\nretriever = retrieve.auto_retriever(\n data=data, embed_model=embed_model,\n type=""hybrid"", top_k=5, mode=""OR""\n)\n\nllm = llms.HuggingFaceHubModel(model=""mistralai/Mistral-7B-Instruct-v0.2"")\npipeline = generator.Generate(question="""",llm=llm,retriever=retriever)\nprint(pipeline.call())', 'raw': '```\n# pip install beyondllm\n# pip install llama-index-embeddings-fastembed\n\nfrom beyondllm import source,retrieve,embeddings,llms,generator\nimport os\nfrom getpass import getpass\nos.environ[\'HUGGINGFACE_ACCESS_TOKEN\'] = getpass(""Enter your HF API token:"")\n\ndata = source.fit(""sample.pdf"", dtype=""pdf"")\nembed_model = embeddings.FastEmbedEmbeddings()\n\nretriever = retrieve.auto_retriever(\n data=data, embed_model=embed_model,\n type=""hybrid"", top_k=5, mode=""OR""\n)\n\nllm = llms.HuggingFaceHubModel(model=""mistralai/Mistral-7B-Instruct-v0.2"")\npipeline = generator.Generate(question="""",llm=llm,retriever=retriever)\nprint(pipeline.call())\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cookbook: ', 'raw': 'Cookbook: '}, {'type': 'link', 'href': 'https://github.com/aiplanethub/beyondllm/blob/main/cookbook/Implementing_Hybrid_Search.ipynb', 'raw': 'https://github.com/aiplanethub/beyondllm/blob/main/cookbook/Implementing_Hybrid_Search.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Support the project by giving a ⭐️ to the repo', 'raw': 'Support the project by giving a ⭐️ to the repo'}, {'type': 'new_line', 'raw': '\n'}]","Advanced RAG - Hybrid Search using HuggingFace Models + +Chat with PDF in 10 lines of code: + +``` +# pip install beyondllm +# pip install llama-index-embeddings-fastembed + +from beyondllm import source,retrieve,embeddings,llms,generator +import os +from getpass import getpass +os.environ['HUGGINGFACE_ACCESS_TOKEN'] = getpass(""Enter your HF API token:"") + +data = source.fit(""sample.pdf"", dtype=""pdf"") +embed_model = embeddings.FastEmbedEmbeddings() + +retriever = retrieve.auto_retriever( + data=data, embed_model=embed_model, + type=""hybrid"", top_k=5, mode=""OR"" +) + +llm = llms.HuggingFaceHubModel(model=""mistralai/Mistral-7B-Instruct-v0.2"") +pipeline = generator.Generate(question="""",llm=llm,retriever=retriever) +print(pipeline.call()) +``` + +Cookbook: https://github.com/aiplanethub/beyondllm/blob/main/cookbook/Implementing_Hybrid_Search.ipynb + +Support the project by giving a ⭐️ to the repo +",[],[],"[{'reaction': '👀', 'users': ['victor', 'daltheman', 'lucifertrj', 'Ramikan-BR', 'lunarflu', 'ToKrCZ', 'catastropiyush'], 'count': 7}, {'reaction': '😎', 'users': ['METASPORTSFUMEZ'], 'count': 1}, {'reaction': '🔥', 'users': ['umair894'], 'count': 1}]",2024-06-12 09:38:59,2024-06-12 09:39:32.540,[],/posts/lucifertrj/230575412081053,2318,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,824565620118854,"[{'type': 'text', 'value': '📢Delighted to share personal findings 🔎 East Asian LLM Qwen1.5 🇨🇳 reasoning capabilities 🧠 Target Sentiment Analysis (TSA). Starting with one of the smallest versions, Qwen1.5-1.8B-Chat, for the original Eastern-European texts (🇷🇺) and their translated versions (🇺🇸) in a zero-shot-learning setup, the key takeaways of these experiments were as follows:', 'raw': '📢Delighted to share personal findings 🔎 East Asian LLM Qwen1.5 🇨🇳 reasoning capabilities 🧠 Target Sentiment Analysis (TSA). Starting with one of the smallest versions, Qwen1.5-1.8B-Chat, for the original Eastern-European texts (🇷🇺) and their translated versions (🇺🇸) in a zero-shot-learning setup, the key takeaways of these experiments were as follows:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 1. The model is capable of performing reasoning in Eastern European languages (🇷🇺) (remember it is 1.8B); switching to Qwen2 results in a strong improvement, with results that surpass LLaMA2-70B-chat (more on the difference below). ', 'raw': '✅ 1. The model is capable of performing reasoning in Eastern European languages (🇷🇺) (remember it is 1.8B); switching to Qwen2 results in a strong improvement, with results that surpass LLaMA2-70B-chat (more on the difference below). '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 2. 
Despite the size of 1.8B, reasoning in English shows a significant gap, underperforming (F1=~34%) the closest Flan-T5-XL (2.8B), which showcases F1=43%.', 'raw': '✅ 2. Despite the size of 1.8B, reasoning in English shows a significant gap, underperforming (F1=~34%) the closest Flan-T5-XL (2.8B), which showcases F1=43%.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 The most intriguing fact is that Qwen1.5-1.8B-Chat: ', 'raw': '💡 The most intriguing fact is that Qwen1.5-1.8B-Chat: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'it generates new words in Russian I\'ve never seen before: ""неретеневая"" (negativitively), and imputes the entries in Chinese. The reason for such low results is that the model was not able to follow the input instruction and shares all the opinions for each class. All of that has been improved though in Qwen2-1.5B.', 'raw': 'it generates new words in Russian I\'ve never seen before: ""неретеневая"" (negativitively), and imputes the entries in Chinese. The reason for such low results is that the model was not able to follow the input instruction and shares all the opinions for each class. All of that has been improved though in Qwen2-1.5B.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benchmark: ', 'raw': 'Benchmark: '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/RuSentNE-LLM-Benchmark', 'raw': 'https://github.com/nicolay-r/RuSentNE-LLM-Benchmark'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Qwen/Qwen1.5-1.8B-Chat'}, 'url': 'https://huggingface.co/Qwen/Qwen1.5-1.8B-Chat', 'raw': 'https://huggingface.co/Qwen/Qwen1.5-1.8B-Chat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'link', 'href': 'https://github.com/dialogue-evaluation/RuSentNE-evaluation', 'raw': 'https://github.com/dialogue-evaluation/RuSentNE-evaluation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342)', 'raw': 'Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection: ', 'raw': 'Collection: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101', 'raw': 'https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101'}]","📢Delighted to share personal findings 🔎 East Asian LLM Qwen1.5 🇨🇳 reasoning capabilities 🧠 Target Sentiment Analysis (TSA). Starting with one of the smallest versions, Qwen1.5-1.8B-Chat, for the original Eastern-European texts (🇷🇺) and their translated versions (🇺🇸) in a zero-shot-learning setup, the key takeaways of these experiments were as follows: + +✅ 1. The model is capable of performing reasoning in Eastern European languages (🇷🇺) (remember it is 1.8B); switching to Qwen2 results in a strong improvement, with results that surpass LLaMA2-70B-chat (more on the difference below). +✅ 2. Despite the size of 1.8B, reasoning in English shows a significant gap, underperforming (F1=~34%) the closest Flan-T5-XL (2.8B), which showcases F1=43%. + +💡 The most intriguing fact is that Qwen1.5-1.8B-Chat: +it generates new words in Russian I've never seen before: ""неретеневая"" (negativitively), and imputes the entries in Chinese. The reason for such low results is that the model was not able to follow the input instruction and shares all the opinions for each class. All of that has been improved though in Qwen2-1.5B. + +Benchmark: https://github.com/nicolay-r/RuSentNE-LLM-Benchmark +Model: https://huggingface.co/Qwen/Qwen1.5-1.8B-Chat +Dataset: https://github.com/dialogue-evaluation/RuSentNE-evaluation +Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342) +Collection: https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/Xz0buOX80UKWZEkAl34AY.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/nRO2mjT7FRyl8QOGQDR47.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/qj1dyp6QPMfoA72bjkE1S.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/Qt0ikA3YU5YFBk480lBqA.png'}]",[],"[{'reaction': '👍', 'users': ['victor', 'kristaller486', 'louisbrulenaudet', 'ZeroWw'], 'count': 4}]",2024-06-12 07:51:39,2024-06-12 14:40:12.326,[],/posts/nicolay-r/824565620118854,1689,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,185242785650522,"[{'type': 'text', 'value': 'All You Need To Know About Apple Intelligence Architecture And Models!!', 'raw': 'All You Need To Know About Apple Intelligence Architecture And Models!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'One key challenge with running llms on device is a balance between compute, performance and model size. Apple Intelligence solves this using small/specialized chunks (Adapters) of the on-device foundation model when needed.', 'raw': 'One key challenge with running llms on device is a balance between compute, performance and model size. Apple Intelligence solves this using small/specialized chunks (Adapters) of the on-device foundation model when needed.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For compute, they engineered a new framework that uses LoRA adapters of rank 16, allowing a merged 2-bit and 4-bit config that yields up to 3.5 bits per weight, achieving the same performance as the uncompressed models.', 'raw': 'For compute, they engineered a new framework that uses LoRA adapters of rank 16, allowing a merged 2-bit and 4-bit config that yields up to 3.5 bits per weight, achieving the same performance as the uncompressed models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With the help of an OSS model latency and power analysis tool (Talaria), they were able to optimize the bit rate selection for each operation. 
This along with activation & embedding quantizations plus efficient key-value caching, achieved up to 30 tokens/sec on iPhone 15 pro.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'When the model is prompted (e.g to rewrite an email in the mail app), the app draws from the app intents toolbox which sends the prompt to the adapter specialized for writing, the model responds through the same pipeline with a real-time update of the text to rewrite.', 'raw': 'When the model is prompted (e.g to rewrite an email in the mail app), the app draws from the app intents toolbox which sends the prompt to the adapter specialized for writing, the model responds through the same pipeline with a real-time update of the text to rewrite.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The coolest feature of these models is their ability to adapt and dynamically specialize on user’s everyday activities. For this they adapt the attention matrices, the attention projection matrix, and the fully connected layers in the point-wise feedforward networks for a suitable set of the decoding layers of the transformer architecture.', 'raw': 'The coolest feature of these models is their ability to adapt and dynamically specialize on user’s everyday activities. For this they adapt the attention matrices, the attention projection matrix, and the fully connected layers in the point-wise feedforward networks for a suitable set of the decoding layers of the transformer architecture.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For tasks that require more capable models, the arch utilizes server/larger models on a private cloud compute infrastructure that delivers SOTA secured and verifiable privacy experience.', 'raw': 'For tasks that require more capable models, the arch utilizes server/larger models on a private cloud compute infrastructure that delivers SOTA secured and verifiable privacy experience.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More on the private cloud compute: ', 'raw': 'More on the private cloud compute: '}, {'type': 'link', 'href': 'https://developer.apple.com/videos/play/wwdc2024/102/', 'raw': 'https://developer.apple.com/videos/play/wwdc2024/102/'}]","All You Need To Know About Apple Intelligence Architecture And Models!! + +One key challenge with running llms on device is a balance between compute, performance and model size. Apple Intelligence solves this using small/specialized chunks (Adapters) of the on-device foundation model when needed. + +For compute, they engineered a new framework that uses LoRA adapters of rank 16, allowing a merged 2-bit and 4-bit config that yields up to 3.5 bits per weight, achieving the same performance as the uncompressed models. + +With the help of an OSS model latency and power analysis tool (Talaria), they were able to optimize the bit rate selection for each operation. This along with activation & embedding quantizations plus efficient key-value caching, achieved up to 30 tokens/sec on iPhone 15 pro. + +When the model is prompted (e.g to rewrite an email in the mail app), the app draws from the app intents toolbox which sends the prompt to the adapter specialized for writing, the model responds through the same pipeline with a real-time update of the text to rewrite. 
+ +The coolest feature of these models is their ability to adapt and dynamically specialize on user’s everyday activities. For this they adapt the attention matrices, the attention projection matrix, and the fully connected layers in the point-wise feedforward networks for a suitable set of the decoding layers of the transformer architecture. + +For tasks that require more capable models, the arch utilizes server/larger models on a private cloud compute infrastructure that delivers SOTA secured and verifiable privacy experience. + +More on the private cloud compute: https://developer.apple.com/videos/play/wwdc2024/102/","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/ltFwRuY0CvMWILnYDv8Pn.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/r7RVkSbaVsRjjw0S5vn9D.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/3tDGKd7ac-dzVL_1aXexB.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/yaQ174B_pwa78oFLtD73P.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/W19Pi3EqSP6nz9O2Fvtz6.png'}]",[],"[{'reaction': '🔥', 'users': ['KingNish', 'kinokritik', 'den0620', 'rreed-pha', 'gksriharsha', 'theainerd', 'jlzhou', 'Korakoe'], 'count': 8}, {'reaction': '👍', 'users': ['dillfrescott', 'jungmin76park'], 'count': 2}, {'reaction': '🤝', 'users': ['victor'], 'count': 1}]",2024-06-12 06:45:07,2024-06-13 00:54:01.876,[],/posts/Jaward/185242785650522,2300,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63f706dfe94ed998c463ed66/tAGil2qiFNev6CfEEDseV.png,22.0,Cuiunbo,Cuiunbo,620151738757120,"[{'type': 'text', 'value': 'Introducing GUICourse! 🎉 ', 'raw': 'Introducing GUICourse! 🎉 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'By leveraging extensive OCR pretraining with grounding ability, we unlock the potential of parsing-free methods for GUIAgent. ', 'raw': 'By leveraging extensive OCR pretraining with grounding ability, we unlock the potential of parsing-free methods for GUIAgent. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: (', 'raw': '📄 Paper: ('}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2406.11317'}, 'url': 'https://huggingface.co/papers/2406.11317', 'raw': 'https://huggingface.co/papers/2406.11317', 'label': 'GUICourse: From General Vision Language Models to Versatile GUI Agents (2406.11317)'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Repo: (', 'raw': '🌐 Github Repo: ('}, {'type': 'link', 'href': 'https://github.com/yiye3/GUICourse', 'raw': 'https://github.com/yiye3/GUICourse'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 Dataset: (', 'raw': '📖 Dataset: ('}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'yiye2023/GUIAct'}, 'url': 'https://huggingface.co/datasets/yiye2023/GUIAct', 'raw': 'https://huggingface.co/datasets/yiye2023/GUIAct'}, {'type': 'text', 'value': ') / (', 'raw': ') / ('}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'yiye2023/GUIChat'}, 'url': 'https://huggingface.co/datasets/yiye2023/GUIChat', 'raw': 'https://huggingface.co/datasets/yiye2023/GUIChat'}, {'type': 'text', 'value': ') / (', 'raw': ') / ('}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'yiye2023/GUIEnv'}, 'url': 'https://huggingface.co/datasets/yiye2023/GUIEnv', 'raw': 'https://huggingface.co/datasets/yiye2023/GUIEnv'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯 Model: (', 'raw': '🎯 Model: ('}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'RhapsodyAI/minicpm-guidance'}, 'url': 'https://huggingface.co/RhapsodyAI/minicpm-guidance', 'raw': 'https://huggingface.co/RhapsodyAI/minicpm-guidance'}, {'type': 'text', 'value': ') / (', 'raw': ') / ('}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'RhapsodyAI/qwen_vl_guidance'}, 'url': 'https://huggingface.co/RhapsodyAI/qwen_vl_guidance', 'raw': 'https://huggingface.co/RhapsodyAI/qwen_vl_guidance'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}]","Introducing GUICourse! 🎉 +By leveraging extensive OCR pretraining with grounding ability, we unlock the potential of parsing-free methods for GUIAgent. 
+📄 Paper: (https://huggingface.co/papers/2406.11317) +🌐 Github Repo: (https://github.com/yiye3/GUICourse) +📖 Dataset: (https://huggingface.co/datasets/yiye2023/GUIAct) / (https://huggingface.co/datasets/yiye2023/GUIChat) / (https://huggingface.co/datasets/yiye2023/GUIEnv) +🎯 Model: (https://huggingface.co/RhapsodyAI/minicpm-guidance) / (https://huggingface.co/RhapsodyAI/qwen_vl_guidance) +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63f706dfe94ed998c463ed66/boKfVz9_OBUCX5PIIS3kY.gif'}]",[],"[{'reaction': '🔥', 'users': ['jwls333', 'victor', 'thibaultM', 'Norod78', 'bokesyo'], 'count': 5}, {'reaction': '👍', 'users': ['Eyel', 'catastropiyush', 'Chars', 'victor'], 'count': 4}, {'reaction': '😎', 'users': ['Cuiunbo', 'Taylor658'], 'count': 2}]",2024-06-11 20:56:35,2024-07-18 15:21:24.390,"[{'_id': '63f706dfe94ed998c463ed66', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63f706dfe94ed998c463ed66/tAGil2qiFNev6CfEEDseV.png', 'fullname': 'Cuiunbo', 'name': 'Cuiunbo', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 22, 'isFollowing': False}, {'_id': '65c12494091bd4e410cd7ce3', 'avatarUrl': '/avatars/9be2a3096db5e369f9e616215dc30550.svg', 'fullname': 'Another Coder', 'name': 'anothercoder2', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/Cuiunbo/620151738757120,2585,,16 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg,3949.0,Victor Mustar,victor,855680325748217,"[{'type': 'text', 'value': 'Congrats to ', 'raw': 'Congrats to '}, {'type': 'mention', 'user': 'alvdansen', 'raw': '@alvdansen'}, {'type': 'text', 'value': "" for one of the nicest SD LoRA ever. It's so sharp and beautiful! "", 'raw': "" for one of the nicest SD LoRA ever. It's so sharp and beautiful! ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check the model page to try it on your own prompts: ', 'raw': 'Check the model page to try it on your own prompts: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'alvdansen/BandW-Manga'}, 'url': 'https://huggingface.co/alvdansen/BandW-Manga', 'raw': 'https://huggingface.co/alvdansen/BandW-Manga'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And follow ', 'raw': 'And follow '}, {'type': 'mention', 'user': 'alvdansen', 'raw': '@alvdansen'}, {'type': 'text', 'value': ' for more 😙', 'raw': ' for more 😙'}]","Congrats to @alvdansen for one of the nicest SD LoRA ever. It's so sharp and beautiful! 
+Check the model page to try it on your own prompts: https://huggingface.co/alvdansen/BandW-Manga +And follow @alvdansen for more 😙","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/tXjxX-Xja4SSx-YDkriQi.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/KqPjySZmQJ-0-DhYWcBFG.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/wtk9zdKCNHTdRmSzjpv3W.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/L_JxKncStuixlJmT6I8RT.png'}]","[{'_id': '635dd6cd4fabde0df74aeae6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/635dd6cd4fabde0df74aeae6/23c0uEOr7RWDtSLDBzkPD.png', 'fullname': 'araminta_k', 'name': 'alvdansen', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 573}]","[{'reaction': '❤️', 'users': ['not-lain', 's3nh', 'Delik', 'louisbrulenaudet', 'fynnkroeger', 'pcuenq', 'nbroad', 'Svngoku', 't1u1', 'Francesco', 'ZeroWw'], 'count': 11}, {'reaction': '🔥', 'users': ['not-lain', 's3nh', 'Hev832', 'mattmdjaga', 'kramp', 'pcuenq', 'nbroad', 'catastropiyush'], 'count': 8}]",2024-06-11 20:49:25,2024-07-10 10:55:31.697,"[{'_id': '635dd6cd4fabde0df74aeae6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/635dd6cd4fabde0df74aeae6/23c0uEOr7RWDtSLDBzkPD.png', 'fullname': 'araminta_k', 'name': 'alvdansen', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 573, 'isFollowing': False}, {'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}, {'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}]",/posts/victor/855680325748217,2408,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,683959932996666,"[{'type': 'text', 'value': ""Who said you couldn't build a big business based on open-source AI? Congrats Mistral team: "", 'raw': ""Who said you couldn't build a big business based on open-source AI? Congrats Mistral team: ""}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'mistralai'}, 'url': 'https://huggingface.co/mistralai', 'raw': 'https://huggingface.co/mistralai', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/634c17653d11eaedd88b314d/9OgyfKstSZtbmsmuG8MbU.png'}]",Who said you couldn't build a big business based on open-source AI? 
Congrats Mistral team: https://huggingface.co/mistralai,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/6hYgQrX5eOWT6sQksqDIa.png'}]",[],"[{'reaction': '🚀', 'users': ['Dref360', 'merve', 'SkalskiP', 'YaTharThShaRma999', 'osanseviero', 'Taylor658', 'nroggendorff', 'ElhamAryanpur', 'victor', 'dillfrescott', 'nbroad', 'Jdkee', 'beomi', 'KingNish', 'umseeker', 'kramp', 'bwang0911', 'not-lain', 'enzostvs', 'shamrockmuffin13', 'pepsighan', 'celsowm', 'vivek-r7', 'denver1603', 'IlyasMoutawwakil'], 'count': 25}, {'reaction': '❤️', 'users': ['Svngoku', 'ToKrCZ', 'osanseviero', 'nroggendorff', 'ElhamAryanpur', 'codehouze', 'dillfrescott', 'nbroad', 'beomi', 'KingNish', 'Darkknight12', 'srisree', 'louisbrulenaudet', 'michelgi', 'not-lain', 'UniLLMer', 'ZeroWw'], 'count': 17}, {'reaction': '👍', 'users': ['waveydaveygravy', 'nicolay-r', 'not-lain', 'devalnor', 'JoPmt'], 'count': 5}, {'reaction': '🤗', 'users': ['umseeker', 'Tonic', 'not-lain'], 'count': 3}]",2024-06-11 17:06:00,2025-03-13 06:32:13.977,"[{'_id': '67cc0d7d94aab97938da1980', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/hDP7NVAs3kmcPuneyodHL.png', 'fullname': 'Rizwan Khan', 'name': 'imrizwan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/clem/683959932996666,3788,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,512274540286082,"[{'type': 'text', 'value': 'releasing: smol vision 🌼 ', 'raw': 'releasing: smol vision 🌼 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A repository with notebooks on shrinking, optimizing, speeding-up, customizing large vision models! ', 'raw': 'A repository with notebooks on shrinking, optimizing, speeding-up, customizing large vision models! '}, {'type': 'link', 'href': 'https://github.com/merveenoyan/smol-vision', 'raw': 'https://github.com/merveenoyan/smol-vision'}, {'type': 'new_line', 'raw': '\n'}]","releasing: smol vision 🌼 + +A repository with notebooks on shrinking, optimizing, speeding-up, customizing large vision models! 
https://github.com/merveenoyan/smol-vision +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/8FUCfUE8UOpPIYgfWzq4T.png'}]",[],"[{'reaction': '🔥', 'users': ['gabrielmbmb', 'Dref360', 'SixOpen', 'osanseviero', 'jgitsolutions', 'Taylor658', 'byoussef', 'louisbrulenaudet', 'fynnkroeger', 'tolgacangoz', 'vikas', 'shaikatasif', 'nbroad', 'kramp', 'afondiel', 'tensorkelechi', 'AxAI'], 'count': 17}, {'reaction': '❤️', 'users': ['dillfrescott', 'Norod78', 'anakin87', 'tolgacangoz'], 'count': 4}, {'reaction': '🚀', 'users': ['Tonic', 'tolgacangoz', 'tensorkelechi'], 'count': 3}, {'reaction': '🤗', 'users': ['tolgacangoz', 'philipp-zettl'], 'count': 2}, {'reaction': '👍', 'users': ['tolgacangoz'], 'count': 1}, {'reaction': '🤝', 'users': ['tolgacangoz'], 'count': 1}, {'reaction': '🧠', 'users': ['tolgacangoz'], 'count': 1}, {'reaction': '🤯', 'users': ['tolgacangoz'], 'count': 1}, {'reaction': '➕', 'users': ['tolgacangoz'], 'count': 1}, {'reaction': '👀', 'users': ['tolgacangoz'], 'count': 1}, {'reaction': '😎', 'users': ['tolgacangoz'], 'count': 1}]",2024-06-11 16:03:36,2024-06-12 18:43:18.862,"[{'_id': '63e3ed6a54f51ea342cdc856', 'avatarUrl': '/avatars/cf7ead21d345b925a7da873f2a5d85a5.svg', 'fullname': 'Prasad Naik', 'name': 'nprasad24', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/merve/512274540286082,3152,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/619f7ba90df8731e0d8b6c54/L0O4z0klhnyjw6eCjrNln.png,267.0,Kadir Nar,kadirnar,602300950760867,"[{'type': 'text', 'value': 'Fast Tachyon SDXL Demo:', 'raw': 'Fast Tachyon SDXL Demo:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo(Zero A100): ', 'raw': 'Demo(Zero A100): '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'kadirnar/BlackHole-Lightning'}, 'url': 'https://huggingface.co/spaces/kadirnar/BlackHole-Lightning', 'raw': 'https://huggingface.co/spaces/kadirnar/BlackHole-Lightning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model Page: ', 'raw': 'Model Page: '}, {'type': 'link', 'href': 'https://civitai.com/models/414108/black-hole', 'raw': 'https://civitai.com/models/414108/black-hole'}]","Fast Tachyon SDXL Demo: + +Demo(Zero A100): https://huggingface.co/spaces/kadirnar/BlackHole-Lightning + +Model Page: https://civitai.com/models/414108/black-hole","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/TisG7IQ1WFlQWVJFnTCpe.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/1mYEKMHGDXDztekBybJm-.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/cyFNoVm6RrApJpsojLJic.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/619f7ba90df8731e0d8b6c54/5SeWEpOZRRJGrkF5KPWyf.png'}]",[],"[{'reaction': '🔥', 'users': ['DmitryRyumin', 'louisbrulenaudet', 'victor', 'osanseviero', 'prithivMLmods', 'merve', 'Ramikan-BR', 'stancobridge', 'MatrixIA'], 'count': 9}, {'reaction': '🚀', 'users': ['Ramikan-BR'], 'count': 1}, {'reaction': '👀', 'users': ['Ramikan-BR'], 'count': 1}, {'reaction': '❤️', 'users': ['Ramikan-BR'], 'count': 1}]",2024-06-11 12:28:09,2024-06-11 12:28:09.940,[],/posts/kadirnar/602300950760867,3936,,0 
+https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,967501479049087,"[{'type': 'text', 'value': 'The application of Phi-3-small-8k-instruct for reasoning in Target Sentiment Analysis (TSA) in a zero-shot-learning mode. Compared with the other 7B vendors, the key takeaways are as follows:', 'raw': 'The application of Phi-3-small-8k-instruct for reasoning in Target Sentiment Analysis (TSA) in a zero-shot-learning mode. Compared with the other 7B vendors, the key takeaways are as follows:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 1. At the moment this model is on top 🎉 of the 7B-sized versions for texts translated into English (🇺🇸), surpassing Mistral-7B-v0.3 and LLaMA-3-8B 🔥 (Figure 1)', 'raw': '✅ 1. At the moment this model is on top 🎉 of the 7B-sized versions for texts translated into English (🇺🇸), surpassing Mistral-7B-v0.3 and LLaMA-3-8B 🔥 (Figure 1)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ 2. It remains similar to 7B alternatives on original non-English texts (🇷🇺), however it shows confidence in sentiment presence among other 7B alternatives (check out the F1(PN0) results in Figure 2)', 'raw': '✅ 2. It remains similar to 7B alternatives on original non-English texts (🇷🇺), however it shows confidence in sentiment presence among other 7B alternatives (check out the F1(PN0) results in Figure 2)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In comparison with its mini (3B) brother Phi-3-mini, the small (7B) version showcases a huge step in reasoning capabilities 🔥', 'raw': 'In comparison with its mini (3B) brother Phi-3-mini, the small (7B) version showcases a huge step in reasoning capabilities 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benchmark: ', 'raw': 'Benchmark: '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/RuSentNE-LLM-Benchmark', 'raw': 'https://github.com/nicolay-r/RuSentNE-LLM-Benchmark'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Phi-3-small-8k-instruct'}, 'url': 'https://huggingface.co/microsoft/Phi-3-small-8k-instruct', 'raw': 'https://huggingface.co/microsoft/Phi-3-small-8k-instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'link', 'href': 'https://github.com/dialogue-evaluation/RuSentNE-evaluation', 'raw': 'https://github.com/dialogue-evaluation/RuSentNE-evaluation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342)', 'raw': 'Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection: ', 'raw': 'Collection: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101', 'raw': 'https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101'}, {'type': 'new_line', 'raw': '\n'}]","The application of Phi-3-small-8k-instruct for reasoning in Target Sentiment Analysis (TSA) in a zero-shot-learning mode. Compared with the other 7B vendors, the key takeaways are as follows:
+✅ 1. At the moment this model is on top 🎉 of the 7B-sized versions for texts translated into English (🇺🇸), surpassing Mistral-7B-v0.3 and LLaMA-3-8B 🔥 (Figure 1)
+✅ 2. It remains similar to 7B alternatives on original non-English texts (🇷🇺), however it shows confidence in sentiment presence among other 7B alternatives (check out the F1(PN0) results in Figure 2)
+
+In comparison with its mini (3B) brother Phi-3-mini, the small (7B) version showcases a huge step in reasoning capabilities 🔥
+
+Benchmark: https://github.com/nicolay-r/RuSentNE-LLM-Benchmark
+Model: https://huggingface.co/microsoft/Phi-3-small-8k-instruct
+Dataset: https://github.com/dialogue-evaluation/RuSentNE-evaluation
+Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342)
+Collection: https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/GBiZXD6Qoe_wELcgYEyGw.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/2ftyz0Bum9UchDq9kwEDR.jpeg'}]",[],"[{'reaction': '👍', 'users': ['Norod78', 'MaziyarPanahi', 'lunarflu', 'jyoung105'], 'count': 4}, {'reaction': '👀', 'users': ['victor', 'lunarflu'], 'count': 2}]",2024-06-06 09:49:43,2024-06-07 17:58:51.902,"[{'_id': '65f0d05cfd7e9976e3dcfffe', 'avatarUrl': '/avatars/c574b7c7ace902c4a613373d3a64e381.svg', 'fullname': 'Pavan Satish', 'name': 'Pavansatish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '64e62d11d27a8292c3637f86', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg', 'fullname': 'Nicolay Rusnachenko', 'name': 'nicolay-r', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 132, 'isFollowing': False}]",/posts/nicolay-r/967501479049087,2119,,6
+https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/DxjZG1XT4H3ZHF7qHxWxk.jpeg,112.0,atayloraerospace,Taylor658,692032777050317,"[{'type': 'text', 'value': ""🔬 This paper introduces Fusion Intelligence (FI), a novel approach integrating the adaptive behaviors of natural organisms 🐝(Bees!)🐝 with AI's computational power."", 'raw': ""🔬 This paper introduces Fusion Intelligence (FI), a novel approach integrating the adaptive behaviors of natural organisms 🐝(Bees!)🐝 with AI's computational power.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper:', 'raw': 'Paper:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fusion Intelligence: Confluence of Natural and Artificial Intelligence for Enhanced Problem-Solving Efficiency (2405.09763)', 'raw': 'Fusion Intelligence: Confluence of Natural and Artificial Intelligence for Enhanced Problem-Solving Efficiency (2405.09763)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2405.09763', 'raw': 'https://arxiv.org/pdf/2405.09763'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Takeaways:', 'raw': 'Key Takeaways:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""* Fusion Intelligence (FI): Combines natural organism efficiency with AI's power. 
🌟""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Hybrid Approach: Integrates natural abilities with AI for better problem-solving. 🧠🤖', 'raw': '* Hybrid Approach: Integrates natural abilities with AI for better problem-solving. 🧠🤖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Agricultural Applications: Shows a 50% improvement in pollination efficiency. 🐝🌼', 'raw': '* Agricultural Applications: Shows a 50% improvement in pollination efficiency. 🐝🌼'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Energy Efficiency: Consumes only 29.5-50.2 mW per bee, much lower than traditional methods. ⚡', 'raw': '* Energy Efficiency: Consumes only 29.5-50.2 mW per bee, much lower than traditional methods. ⚡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Scalability: Applicable to fields like environmental monitoring and search and rescue. 🌍🔍', 'raw': '* Scalability: Applicable to fields like environmental monitoring and search and rescue. 🌍🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Non-Invasive: Eliminates the need for invasive modifications to biological entities. 🌱', 'raw': '* Non-Invasive: Eliminates the need for invasive modifications to biological entities. 🌱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This research offers a new approach for those interested in sustainable AI solutions. By merging biology with AI, (FI) aims to create solutions for a variety of challenges.', 'raw': 'This research offers a new approach for those interested in sustainable AI solutions. By merging biology with AI, (FI) aims to create solutions for a variety of challenges.'}, {'type': 'new_line', 'raw': '\n'}]","🔬 This paper introduces Fusion Intelligence (FI), a novel approach integrating the adaptive behaviors of natural organisms 🐝(Bees!)🐝 with AI's computational power. + +Paper: +Fusion Intelligence: Confluence of Natural and Artificial Intelligence for Enhanced Problem-Solving Efficiency (2405.09763) +https://arxiv.org/pdf/2405.09763 + +Key Takeaways: +* Fusion Intelligence (FI): Combines natural organism efficiency with AI's power. 🌟 +* Hybrid Approach: Integrates natural abilities with AI for better problem-solving. 🧠🤖 +* Agricultural Applications: Shows a 50% improvement in pollination efficiency. 🐝🌼 +* Energy Efficiency: Consumes only 29.5-50.2 mW per bee, much lower than traditional methods. ⚡ +* Scalability: Applicable to fields like environmental monitoring and search and rescue. 🌍🔍 +* Non-Invasive: Eliminates the need for invasive modifications to biological entities. 🌱 + +This research offers a new approach for those interested in sustainable AI solutions. By merging biology with AI, (FI) aims to create solutions for a variety of challenges. 
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/uAFZjnCcoh1HjxELHUM8W.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/GUDoNaNZhnEeVw6GkkBw7.jpeg'}]",[],"[{'reaction': '👍', 'users': ['rbgo', 'umutphp', 'Moibe', 'ajibawa-2023', 'seanthw'], 'count': 5}, {'reaction': '🤝', 'users': ['pduf'], 'count': 1}]",2024-06-06 04:54:14,2024-06-08 11:35:08.863,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}, {'_id': '6603f028b4344a2b07d4d841', 'avatarUrl': '/avatars/cf2607a4ab6f041f2009aaafbc1dbe71.svg', 'fullname': 'haxor', 'name': 'haxorbroken', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Taylor658/692032777050317,2160,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,120126659383142,"[{'type': 'text', 'value': 'V-Express: 1-Click AI Avatar Talking Heads Video Animation Generator - D-ID Alike - Free Open Source', 'raw': 'V-Express: 1-Click AI Avatar Talking Heads Video Animation Generator - D-ID Alike - Free Open Source'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full Windows YouTube Tutorial : ', 'raw': 'Full Windows YouTube Tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/xLqDTVWUSec', 'raw': 'https://youtu.be/xLqDTVWUSec'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Ever wished your static images could talk like magic? Meet V-Express, the groundbreaking open-source and free tool that breathes life into your photos! Whether you have an audio clip or a video, V-Express animates your images to create stunning talking avatars. Just like the acclaimed D-ID Avatar, Wav2Lip, and Avatarify, V-Express turns your still photos into dynamic, speaking personas, but with a twist—it's completely open-source and free to use! With seamless audio integration and the ability to mimic video expressions, V-Express offers an unparalleled experience without any cost or restrictions. Experience the future of digital avatars today—let's dive into how you can get started with V-Express and watch your images come alive!"", 'raw': ""Ever wished your static images could talk like magic? Meet V-Express, the groundbreaking open-source and free tool that breathes life into your photos! Whether you have an audio clip or a video, V-Express animates your images to create stunning talking avatars. Just like the acclaimed D-ID Avatar, Wav2Lip, and Avatarify, V-Express turns your still photos into dynamic, speaking personas, but with a twist—it's completely open-source and free to use! With seamless audio integration and the ability to mimic video expressions, V-Express offers an unparalleled experience without any cost or restrictions. 
Experience the future of digital avatars today—let's dive into how you can get started with V-Express and watch your images come alive!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1-Click V-Express Installers Scripts ⤵️', 'raw': '1-Click V-Express Installers Scripts ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.patreon.com/posts/105251204', 'raw': 'https://www.patreon.com/posts/105251204'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Requirements Step by Step Tutorial ⤵️', 'raw': 'Requirements Step by Step Tutorial ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/-NjNy7afOQ0', 'raw': 'https://youtu.be/-NjNy7afOQ0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Official V-Express GitHub Repository Free To Install and Use ⤵️', 'raw': 'Official V-Express GitHub Repository Free To Install and Use ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/tencent-ailab/V-Express', 'raw': 'https://github.com/tencent-ailab/V-Express'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'SECourses Discord Channel to Get Full Support ⤵️', 'raw': 'SECourses Discord Channel to Get Full Support ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388', 'raw': 'https://discord.com/servers/software-engineering-courses-secourses-772774097734074388'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","V-Express: 1-Click AI Avatar Talking Heads Video Animation Generator - D-ID Alike - Free Open Source
+
+Full Windows YouTube Tutorial : https://youtu.be/xLqDTVWUSec
+
+Ever wished your static images could talk like magic? Meet V-Express, the groundbreaking open-source and free tool that breathes life into your photos! Whether you have an audio clip or a video, V-Express animates your images to create stunning talking avatars. Just like the acclaimed D-ID Avatar, Wav2Lip, and Avatarify, V-Express turns your still photos into dynamic, speaking personas, but with a twist—it's completely open-source and free to use! With seamless audio integration and the ability to mimic video expressions, V-Express offers an unparalleled experience without any cost or restrictions. Experience the future of digital avatars today—let's dive into how you can get started with V-Express and watch your images come alive! 
+
+1-Click V-Express Installers Scripts ⤵️
+https://www.patreon.com/posts/105251204
+
+Requirements Step by Step Tutorial ⤵️
+https://youtu.be/-NjNy7afOQ0
+
+Official V-Express GitHub Repository Free To Install and Use ⤵️
+https://github.com/tencent-ailab/V-Express
+
+SECourses Discord Channel to Get Full Support ⤵️
+https://discord.com/servers/software-engineering-courses-secourses-772774097734074388
+
+
+",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'zephyrwang', 'AdinaY', 'umair894', 'Jason233'], 'count': 5}, {'reaction': '🚀', 'users': ['MonsterMMORPG', 'zephyrwang', 'AdinaY'], 'count': 3}, {'reaction': '👀', 'users': ['MonsterMMORPG', 'zephyrwang', 'TravelingMan'], 'count': 3}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'odysonn'], 'count': 2}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👍', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤯', 'users': ['MonsterMMORPG'], 'count': 1}]",2024-06-06 00:48:45,2024-06-06 00:48:45.170,[],/posts/MonsterMMORPG/120126659383142,4368,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/630904f2c038bf42d56d9d11/S8mYgFpPSHYOiifBnNfwG.jpeg,64.0,Harpreet Sahota,harpreetsahota,870025460089676,"[{'type': 'text', 'value': 'The Coachella of Computer Vision, CVPR, is right around the corner. In anticipation of the conference, I curated a dataset of the papers. ', 'raw': 'The Coachella of Computer Vision, CVPR, is right around the corner. In anticipation of the conference, I curated a dataset of the papers. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'll have a technical blog post out tomorrow doing some analysis on the dataset, but I'm so hyped that I wanted to get it out to the community ASAP."", 'raw': ""I'll have a technical blog post out tomorrow doing some analysis on the dataset, but I'm so hyped that I wanted to get it out to the community ASAP.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The dataset consists of the following fields:', 'raw': 'The dataset consists of the following fields:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - An image of the first page of the paper', 'raw': ' - An image of the first page of the paper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'title', 'raw': '`title`'}, {'type': 'text', 'value': ': The title of the paper', 'raw': ': The title of the paper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'authors_list', 'raw': '`authors_list`'}, {'type': 'text', 'value': ': The list of authors', 'raw': ': The list of authors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'abstract', 'raw': '`abstract`'}, {'type': 'text', 'value': ': The abstract of the paper', 'raw': ': The abstract of the paper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'arxiv_link', 'raw': '`arxiv_link`'}, {'type': 'text', 'value': ': Link to the paper on arXiv', 'raw': ': Link to the paper on arXiv'}, {'type': 'new_line', 
'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'other_link', 'raw': '`other_link`'}, {'type': 'text', 'value': ': Link to the project page, if found', 'raw': ': Link to the project page, if found'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'category_name', 'raw': '`category_name`'}, {'type': 'text', 'value': ': The primary category this paper according to [arXiv taxonomy](', 'raw': ': The primary category this paper according to [arXiv taxonomy]('}, {'type': 'link', 'href': 'https://arxiv.org/category_taxonomy', 'raw': 'https://arxiv.org/category_taxonomy'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'all_categories', 'raw': '`all_categories`'}, {'type': 'text', 'value': ': All categories this paper falls into, according to arXiv taxonomy', 'raw': ': All categories this paper falls into, according to arXiv taxonomy'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - ', 'raw': ' - '}, {'type': 'inline_code', 'code': 'keywords', 'raw': '`keywords`'}, {'type': 'text', 'value': ': Extracted using GPT-4o', 'raw': ': Extracted using GPT-4o'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's how I created the dataset 👇🏼"", 'raw': ""Here's how I created the dataset 👇🏼""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Generic code for building this dataset can be found [here](', 'raw': 'Generic code for building this dataset can be found [here]('}, {'type': 'link', 'href': 'https://github.com/harpreetsahota204/CVPR-2024-Papers', 'raw': 'https://github.com/harpreetsahota204/CVPR-2024-Papers'}, {'type': 'text', 'value': ').', 'raw': ').'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This dataset was built using the following steps:', 'raw': 'This dataset was built using the following steps:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Scrape the CVPR 2024 website for accepted papers', 'raw': '- Scrape the CVPR 2024 website for accepted papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Use DuckDuckGo to search for a link to the paper's abstract on arXiv"", 'raw': ""- Use DuckDuckGo to search for a link to the paper's abstract on arXiv""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Use arXiv.py (python wrapper for the arXiv API) to extract the abstract and categories, and download the pdf for each paper', 'raw': '- Use arXiv.py (python wrapper for the arXiv API) to extract the abstract and categories, and download the pdf for each paper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Use pdf2image to save the image of paper's first page"", 'raw': ""- Use pdf2image to save the image of paper's first page""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Use GPT-4o to extract keywords from the abstract', 'raw': '- Use GPT-4o to extract keywords from the abstract'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Voxel51/CVPR_2024_Papers'}, 'url': 'https://huggingface.co/datasets/Voxel51/CVPR_2024_Papers', 'raw': 
'https://huggingface.co/datasets/Voxel51/CVPR_2024_Papers'}]","The Coachella of Computer Vision, CVPR, is right around the corner. In anticipation of the conference, I curated a dataset of the papers. + +I'll have a technical blog post out tomorrow doing some analysis on the dataset, but I'm so hyped that I wanted to get it out to the community ASAP. + +The dataset consists of the following fields: + + - An image of the first page of the paper + - `title`: The title of the paper + - `authors_list`: The list of authors + - `abstract`: The abstract of the paper + - `arxiv_link`: Link to the paper on arXiv + - `other_link`: Link to the project page, if found + - `category_name`: The primary category this paper according to [arXiv taxonomy](https://arxiv.org/category_taxonomy) + - `all_categories`: All categories this paper falls into, according to arXiv taxonomy + - `keywords`: Extracted using GPT-4o + +Here's how I created the dataset 👇🏼 + +Generic code for building this dataset can be found [here](https://github.com/harpreetsahota204/CVPR-2024-Papers). + +This dataset was built using the following steps: + +- Scrape the CVPR 2024 website for accepted papers +- Use DuckDuckGo to search for a link to the paper's abstract on arXiv +- Use arXiv.py (python wrapper for the arXiv API) to extract the abstract and categories, and download the pdf for each paper +- Use pdf2image to save the image of paper's first page +- Use GPT-4o to extract keywords from the abstract + +https://huggingface.co/datasets/Voxel51/CVPR_2024_Papers","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/630904f2c038bf42d56d9d11/eAogxMSfS8eoIreSoURQQ.png'}]",[],"[{'reaction': '🔥', 'users': ['jamarks', 'harpreetsahota', 'orkut', 'jetsadaporn87', 'ajibawa-2023'], 'count': 5}, {'reaction': '🚀', 'users': ['harpreetsahota'], 'count': 1}, {'reaction': '👍', 'users': ['fffiloni'], 'count': 1}]",2024-06-05 23:54:03,2024-06-05 23:54:03.520,[],/posts/harpreetsahota/870025460089676,2362,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg,1593.0,Clelia Astra Bertelli,as-cle-bert,387352788347389,"[{'type': 'text', 'value': ""🌍 As we all know, Planet Earth is undergoing an unprecedented climate crisis, almost totally due to human activities: we haven't got much time left before it's too late to take action, and one of the key fields where we need to urgently operate are climate-aware financial investments..."", 'raw': ""🌍 As we all know, Planet Earth is undergoing an unprecedented climate crisis, almost totally due to human activities: we haven't got much time left before it's too late to take action, and one of the key fields where we need to urgently operate are climate-aware financial investments...""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🤖 ... And that's where AI comes into the play: we can indeed try to leverage, tweak and expand its knowledge in the field to extract valuable climate-aware solutions. "", 'raw': ""🤖 ... And that's where AI comes into the play: we can indeed try to leverage, tweak and expand its knowledge in the field to extract valuable climate-aware solutions. 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 I tried to make something alike: exploiting ', 'raw': '🤗 I tried to make something alike: exploiting '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'climatebert/tcfd_recommendations'}, 'url': 'https://huggingface.co/datasets/climatebert/tcfd_recommendations', 'raw': 'https://huggingface.co/datasets/climatebert/tcfd_recommendations'}, {'type': 'text', 'value': ' as knowledge base, Qdrant Cloud as vector store service and ', 'raw': ' as knowledge base, Qdrant Cloud as vector store service and '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Phi-3-mini-128k-instruct'}, 'url': 'https://huggingface.co/microsoft/Phi-3-mini-128k-instruct', 'raw': 'https://huggingface.co/microsoft/Phi-3-mini-128k-instruct'}, {'type': 'text', 'value': ' as LLM (provided via API from ', 'raw': ' as LLM (provided via API from '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/eswardivi/Phi-3-mini-128k-instruct', 'raw': 'https://huggingface.co/spaces/eswardivi/Phi-3-mini-128k-instruct'}, {'type': 'text', 'value': ' by ', 'raw': ' by '}, {'type': 'mention', 'user': 'eswardivi', 'raw': '@eswardivi'}, {'type': 'text', 'value': '), I built an AI assistant to help you find climate-oriented solutions for your investments, companies, or simply for your everyday life🎒.', 'raw': '), I built an AI assistant to help you find climate-oriented solutions for your investments, companies, or simply for your everyday life🎒.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Find it here: ', 'raw': 'Find it here: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'as-cle-bert/cLLiMateChat'}, 'url': 'https://huggingface.co/spaces/as-cle-bert/cLLiMateChat', 'raw': 'https://huggingface.co/spaces/as-cle-bert/cLLiMateChat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub: ', 'raw': 'GitHub: '}, {'type': 'link', 'href': 'https://github.com/AstraBert/qdrant-ai-chat', 'raw': 'https://github.com/AstraBert/qdrant-ai-chat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Website: ', 'raw': 'Website: '}, {'type': 'link', 'href': 'https://astrabert.github.io/qdrant-ai-chat/', 'raw': 'https://astrabert.github.io/qdrant-ai-chat/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Be kind to our Planet, we only got one💚', 'raw': 'Be kind to our Planet, we only got one💚'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(Shout-outs to ', 'raw': '(Shout-outs to '}, {'type': 'mention', 'user': 'JohnSmith9982', 'raw': '@JohnSmith9982'}, {'type': 'text', 'value': ' whose ', 'raw': ' whose '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'JohnSmith9982/small_and_pretty'}, 'url': 'https://huggingface.co/spaces/JohnSmith9982/small_and_pretty', 'raw': 'https://huggingface.co/spaces/JohnSmith9982/small_and_pretty'}, {'type': 'text', 'value': ' Gradio theme was used to build my application🚀)', 'raw': ' Gradio theme was used to build my application🚀)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PS: 🌱Curious of knowing what is your carbon footprint? 
Head over to this ML-backed HF Space I built to discover it: ', 'raw': 'PS: 🌱Curious of knowing what is your carbon footprint? Head over to this ML-backed HF Space I built to discover it: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'as-cle-bert/carbon-footprint-predictor'}, 'url': 'https://huggingface.co/spaces/as-cle-bert/carbon-footprint-predictor', 'raw': 'https://huggingface.co/spaces/as-cle-bert/carbon-footprint-predictor'}]","🌍 As we all know, Planet Earth is undergoing an unprecedented climate crisis, almost totally due to human activities: we haven't got much time left before it's too late to take action, and one of the key fields where we need to urgently operate are climate-aware financial investments... +🤖 ... And that's where AI comes into the play: we can indeed try to leverage, tweak and expand its knowledge in the field to extract valuable climate-aware solutions. +🤗 I tried to make something alike: exploiting https://huggingface.co/datasets/climatebert/tcfd_recommendations as knowledge base, Qdrant Cloud as vector store service and https://huggingface.co/microsoft/Phi-3-mini-128k-instruct as LLM (provided via API from https://huggingface.co/spaces/eswardivi/Phi-3-mini-128k-instruct by @eswardivi), I built an AI assistant to help you find climate-oriented solutions for your investments, companies, or simply for your everyday life🎒. +Find it here: https://huggingface.co/spaces/as-cle-bert/cLLiMateChat + +GitHub: https://github.com/AstraBert/qdrant-ai-chat +Website: https://astrabert.github.io/qdrant-ai-chat/ + +Be kind to our Planet, we only got one💚 + +(Shout-outs to @JohnSmith9982 whose https://huggingface.co/spaces/JohnSmith9982/small_and_pretty Gradio theme was used to build my application🚀) + +PS: 🌱Curious of knowing what is your carbon footprint? 
Head over to this ML-backed HF Space I built to discover it: https://huggingface.co/spaces/as-cle-bert/carbon-footprint-predictor",[],"[{'_id': '630bc25d4c0945d20b880e9a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/630bc25d4c0945d20b880e9a/CjDLfmCGcCkOS4E1t756M.jpeg', 'fullname': 'Divi Eswar Chowdary', 'name': 'eswardivi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 27}, {'_id': '6345717d547c70e4b7cc4999', 'avatarUrl': '/avatars/ed6996a557141e18f2be161e4e72caae.svg', 'fullname': 'John Smith', 'name': 'JohnSmith9982', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 32}]","[{'reaction': '👍', 'users': ['Taylor658', 'lunarflu', 'neovalle', 'LucasFlorentino', 'Severian', 'Ramikan-BR', 'louisbrulenaudet'], 'count': 7}, {'reaction': '❤️', 'users': ['lunarflu', 'LucasFlorentino', 'Ramikan-BR'], 'count': 3}, {'reaction': '🤗', 'users': ['lunarflu', 'Ramikan-BR'], 'count': 2}]",2024-06-05 21:08:21,2024-06-08 09:33:08.805,"[{'_id': '630cf5d14ca0a22768bbe10c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/630cf5d14ca0a22768bbe10c/R6qfkfeKCNdiSl5clsorr.png', 'fullname': 'Aaron Day', 'name': 'aaronday3', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 26, 'isFollowing': False}, {'_id': '65e330e7edc2f7306e252448', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65e330e7edc2f7306e252448/oYAOGhbPaXDTbEoJoSLMB.jpeg', 'fullname': 'Clelia Astra Bertelli', 'name': 'as-cle-bert', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1593, 'isFollowing': False}]",/posts/as-cle-bert/387352788347389,1475,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,859744242518946,"[{'type': 'text', 'value': ""THUDM has released GLM-4V-9B and it's.. chatty! 😂 "", 'raw': ""THUDM has released GLM-4V-9B and it's.. chatty! 😂 ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I asked it to describe my favorite Howl's Moving Castle scene and here's how it went 👇🏻"", 'raw': ""I asked it to describe my favorite Howl's Moving Castle scene and here's how it went 👇🏻""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""joke aside it seems to outperform the previous VLMs. however the license isn't open-source 📈 "", 'raw': ""joke aside it seems to outperform the previous VLMs. however the license isn't open-source 📈 ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'model repo: ', 'raw': 'model repo: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'THUDM/glm-4v-9b'}, 'url': 'https://huggingface.co/THUDM/glm-4v-9b', 'raw': 'https://huggingface.co/THUDM/glm-4v-9b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'a community member has built a demo: ', 'raw': 'a community member has built a demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'vilarin/VL-Chatbox'}, 'url': 'https://huggingface.co/spaces/vilarin/VL-Chatbox', 'raw': 'https://huggingface.co/spaces/vilarin/VL-Chatbox'}]","THUDM has released GLM-4V-9B and it's.. chatty! 😂 +I asked it to describe my favorite Howl's Moving Castle scene and here's how it went 👇🏻 + +joke aside it seems to outperform the previous VLMs. 
however the license isn't open-source 📈 +model repo: https://huggingface.co/THUDM/glm-4v-9b +a community member has built a demo: https://huggingface.co/spaces/vilarin/VL-Chatbox","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/aZSoa0GcqslNz4ODPQy-4.jpeg'}]",[],"[{'reaction': '👍', 'users': ['orrinin', 'ranork', 'lunarflu', 'Timilla'], 'count': 4}, {'reaction': '❤️', 'users': ['vilarin', 'lunarflu'], 'count': 2}]",2024-06-05 19:59:48,2024-06-06 04:35:17.154,"[{'_id': '642827944fe87caede802784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/642827944fe87caede802784/a7s3Ub9Cy6-PuuaX8wwXm.png', 'fullname': 'VILARIN', 'name': 'vilarin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 83, 'isFollowing': False}]",/posts/merve/859744242518946,2741,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/o-5N9QyjHgmSMk69e3O55.png,274.0,Evgeniy Hristoforu,ehristoforu,760542103453473,"[{'type': 'text', 'value': 'I decided to play around with FluentlyXL v4 😉', 'raw': 'I decided to play around with FluentlyXL v4 😉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Model: ', 'raw': '👉 Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'fluently/Fluently-XL-v4'}, 'url': 'https://huggingface.co/fluently/Fluently-XL-v4', 'raw': 'https://huggingface.co/fluently/Fluently-XL-v4'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✨ Playground: ', 'raw': '✨ Playground: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'fluently/Fluently-Playground'}, 'url': 'https://huggingface.co/spaces/fluently/Fluently-Playground', 'raw': 'https://huggingface.co/spaces/fluently/Fluently-Playground'}]","I decided to play around with FluentlyXL v4 😉 + +👉 Model: https://huggingface.co/fluently/Fluently-XL-v4 +✨ Playground: https://huggingface.co/spaces/fluently/Fluently-Playground","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a3d8d58448f47df24c041a/WXzFAsT65FNP6St76x7CL.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a3d8d58448f47df24c041a/JcRajapdya6sE--6NnL0E.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a3d8d58448f47df24c041a/0xkUod7T5y3Dnz39AtU_7.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a3d8d58448f47df24c041a/HByXycKb-HV9i6xCk5R_e.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a3d8d58448f47df24c041a/xUHw8VN5c0NUv4I8s73uZ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65a3d8d58448f47df24c041a/gN8sarNOO18lLMNV5JH4N.png'}]",[],"[{'reaction': '👍', 'users': ['ehristoforu', 'dreamdrop-art', 'S1m0neAI', 'lunarflu', 'LucasFlorentino', 'TravelingMan', 's3nh', 'victor', 'Ramikan-BR', 'cbensimon', 'louisbrulenaudet', 'ifmain'], 'count': 12}, {'reaction': '🔥', 'users': ['lunarflu', 'Ramikan-BR', 'cbensimon', 'Westis', 'dreamdrop-art'], 'count': 5}, {'reaction': '👀', 'users': ['Ramikan-BR', 'GPT007', 'dreamdrop-art'], 'count': 3}, {'reaction': '🚀', 'users': ['Ramikan-BR', 'dreamdrop-art'], 'count': 2}, {'reaction': '❤️', 'users': ['Ramikan-BR', 'dreamdrop-art'], 'count': 2}]",2024-06-05 19:40:37,2024-06-05 
19:40:37.970,[],/posts/ehristoforu/760542103453473,1929,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64e62d11d27a8292c3637f86/aptDeBHpCJxcREj6KPLN1.jpeg,132.0,Nicolay Rusnachenko,nicolay-r,588142490319312,"[{'type': 'text', 'value': '📢 The most recent Mistral-7B-Instruct-v0.3 release showcases more robust advances in zero-shot mode on Target Sentiment Analysis.', 'raw': '📢 The most recent Mistral-7B-Instruct-v0.3 release showcases more robust advances in zero-shot mode on Target Sentiment Analysis.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧪 We experiment with the original texts (🇷🇺) and their translated version into English (🇺🇸).', 'raw': '🧪 We experiment with the original texts (🇷🇺) and their translated version into English (🇺🇸).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💡 The key takeaways on what to expect from this model are as follows:', 'raw': '💡 The key takeaways on what to expect from this model are as follows:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔️ 1. On translated texts into English (🇺🇸), it surpasses LLaMA-3 and nearly touches MOE Mixtral 8x7B versions, being quite precise across all the classes by F1(PN)', 'raw': '✔️ 1. On translated texts into English (🇺🇸), it surpasses LLaMA-3 and nearly touches MOE Mixtral 8x7B versions, being quite precise across all the classes by F1(PN)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔️2. On original texts (🇷🇺) it slightly surpasses LLaMA-3 by F1(PN) by being less tolerant in neutral (F1(PN0)). Using larger versions (Mixtral) is still the preferable choice for reasoning 🧠 in non-eng texts.', 'raw': '✔️2. On original texts (🇷🇺) it slightly surpasses LLaMA-3 by F1(PN) by being less tolerant in neutral (F1(PN0)). Using larger versions (Mixtral) is still the preferable choice for reasoning 🧠 in non-eng texts.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✔️3. You can clearly see the difference between 7B version and MOE (figure 3) by F1(PN0)', 'raw': '✔️3. You can clearly see the difference between 7B version and MOE (figure 3) by F1(PN0)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benchmark: ', 'raw': 'Benchmark: '}, {'type': 'link', 'href': 'https://github.com/nicolay-r/RuSentNE-LLM-Benchmark', 'raw': 'https://github.com/nicolay-r/RuSentNE-LLM-Benchmark'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'mistralai/Mistral-7B-Instruct-v0.3'}, 'url': 'https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.3', 'raw': 'https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.3'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'link', 'href': 'https://github.com/dialogue-evaluation/RuSentNE-evaluation', 'raw': 'https://github.com/dialogue-evaluation/RuSentNE-evaluation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342)', 'raw': 'Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collection: ', 'raw': 'Collection: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101', 'raw': 'https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101'}]","📢 The most recent Mistral-7B-Instruct-v0.3 release showcases more robust advances in zero-shot mode on Target Sentiment Analysis. +🧪 We experiment with the original texts (🇷🇺) and their translated version into English (🇺🇸). +💡 The key takeaways on what to expect from this model are as follows: +✔️ 1. On translated texts into English (🇺🇸), it surpasses LLaMA-3 and nearly touches MOE Mixtral 8x7B versions, being quite precise across all the classes by F1(PN) +✔️2. On original texts (🇷🇺) it slightly surpasses LLaMA-3 by F1(PN) by being less tolerant in neutral (F1(PN0)). Using larger versions (Mixtral) is still the preferable choice for reasoning 🧠 in non-eng texts. +✔️3.
You can clearly see the difference between 7B version and MOE (figure 3) by F1(PN0) +Benchmark: https://github.com/nicolay-r/RuSentNE-LLM-Benchmark +Model: https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.3 +Dataset: https://github.com/dialogue-evaluation/RuSentNE-evaluation +Related paper: Large Language Models in Targeted Sentiment Analysis (2404.12342) +Collection: https://huggingface.co/collections/nicolay-r/sentiment-analysis-665ba391e0eba729021ea101","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/8fiDZNwz8ThjLIpoIxC4W.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/zoifR0J2-cEVNfTvQ9N4E.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64e62d11d27a8292c3637f86/XUuG7ExiT2ZwVSAWxdxst.png'}]",[],"[{'reaction': '👍', 'users': ['victor', 'kristaller486', 'osanseviero', 'lunarflu'], 'count': 4}]",2024-06-05 09:23:29,2024-06-05 09:30:07.493,[],/posts/nicolay-r/588142490319312,2413,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,612249613019261,"[{'type': 'text', 'value': 'A great vision language benchmark: MM-UPD evaluates how model responds to unsolvable problems 🤓 ', 'raw': 'A great vision language benchmark: MM-UPD evaluates how model responds to unsolvable problems 🤓 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LLaVA 1.6 is outperforming proprietary VLMs, making it a very robust choice for production!', 'raw': 'LLaVA 1.6 is outperforming proprietary VLMs, making it a very robust choice for production!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It is now hosted as a leaderboard ', 'raw': 'It is now hosted as a leaderboard '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'MM-UPD/MM-UPD_Leaderboard'}, 'url': 'https://huggingface.co/spaces/MM-UPD/MM-UPD_Leaderboard', 'raw': 'https://huggingface.co/spaces/MM-UPD/MM-UPD_Leaderboard'}, {'type': 'text', 'value': ' 🏆💕', 'raw': ' 🏆💕'}]","A great vision language benchmark: MM-UPD evaluates how model responds to unsolvable problems 🤓 +LLaVA 1.6 is outperforming proprietary VLMs, making it a very robust choice for production! 
+ +It is now hosted as a leaderboard https://huggingface.co/spaces/MM-UPD/MM-UPD_Leaderboard 🏆💕","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/9A9gU8C6oQFdkZ9zwMFNh.png'}]",[],"[{'reaction': '🚀', 'users': ['victor', 'Ramikan-BR', 'osanseviero', 'aryansinghtech', 'jeremy-london', 'lunarflu', 'taufiqdp', 'AtsuMiyai'], 'count': 8}]",2024-06-05 08:57:30,2024-06-05 08:57:30.329,[],/posts/merve/612249613019261,2687,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,295094188881098,"[{'type': 'text', 'value': 'Hello, Vision World!', 'raw': 'Hello, Vision World!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'AI4Chem/ChemLLM-20B-Chat-DPO'}, 'url': 'https://huggingface.co/AI4Chem/ChemLLM-20B-Chat-DPO', 'raw': 'https://huggingface.co/AI4Chem/ChemLLM-20B-Chat-DPO'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.06852'}, 'url': 'https://huggingface.co/papers/2402.06852', 'raw': 'https://huggingface.co/papers/2402.06852', 'label': 'ChemLLM: A Chemical Large Language Model (2402.06852)'}, {'type': 'new_line', 'raw': '\n'}]","Hello, Vision World! +https://huggingface.co/AI4Chem/ChemLLM-20B-Chat-DPO +https://huggingface.co/papers/2402.06852 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64bce15bafd1e46c5504ad38/0Qo4MtdIlfQdr5tef1XiC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64bce15bafd1e46c5504ad38/tRFOtI0pXiLO8VCLSXPL-.png'}]",[],"[{'reaction': '🤗', 'users': ['di-zhang-fdu', 'victor', 'KingNish', 'osanseviero', 'Taylor658', 'louisbrulenaudet', 'abdesBen', 'lunarflu'], 'count': 8}, {'reaction': '🚀', 'users': ['victor', 'lunarflu'], 'count': 2}, {'reaction': '👍', 'users': ['abdesBen', 'lunarflu'], 'count': 2}]",2024-06-05 07:39:10,2024-06-06 09:58:18.842,"[{'_id': '662621c10e31d65ecc604512', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/662621c10e31d65ecc604512/SIh9DBJT447wsZ7a6dKL0.jpeg', 'fullname': 'CAI', 'name': 'Ruoqi7', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '64bce15bafd1e46c5504ad38', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png', 'fullname': 'Di Zhang', 'name': 'di-zhang-fdu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 151, 'isFollowing': False}]",/posts/di-zhang-fdu/295094188881098,2059,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/61bf84c8ca59d6d196a1b4e8/L_NvUwlMYcye9X35z6f7e.jpeg,63.0,Amir Hossein Kargaran,kargaranamir,236711741382850,"[{'type': 'text', 'value': 'Introducing GlotCC: a new 2TB corpus based on an early 2024 CommonCrawl snapshot with data for 1000+ languages.', 'raw': 'Introducing GlotCC: a new 2TB corpus based on an early 2024 CommonCrawl snapshot with data for 1000+ languages.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 corpus v1: ', 'raw': '🤗 corpus v1: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'cis-lmu/GlotCC-V1'}, 'url': 'https://huggingface.co/datasets/cis-lmu/GlotCC-V1', 'raw': 
'https://huggingface.co/datasets/cis-lmu/GlotCC-V1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐱 pipeline v3: ', 'raw': '🐱 pipeline v3: '}, {'type': 'link', 'href': 'https://github.com/cisnlp/GlotCC', 'raw': 'https://github.com/cisnlp/GlotCC'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More details? Stay tuned for our upcoming paper.', 'raw': 'More details? Stay tuned for our upcoming paper.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More data? In the next version, we plan to include additional snapshots of CommonCrawl.', 'raw': 'More data? In the next version, we plan to include additional snapshots of CommonCrawl.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Limitation: Due to the lower frequency of low-resource languages compared to others, there are sometimes only a few sentences available for very low-resource languages. However, the data volume for English in this version stands at 750GB, and the top 200 languages still have a strong presence in our data (see plot attached; we write the index for every 20 languages, meaning the 10th index is the 200th language).', 'raw': 'Limitation: Due to the lower frequency of low-resource languages compared to others, there are sometimes only a few sentences available for very low-resource languages. However, the data volume for English in this version stands at 750GB, and the top 200 languages still have a strong presence in our data (see plot attached; we write the index for every 20 languages, meaning the 10th index is the 200th language).'}, {'type': 'new_line', 'raw': '\n'}]","Introducing GlotCC: a new 2TB corpus based on an early 2024 CommonCrawl snapshot with data for 1000+ languages. + +🤗 corpus v1: https://huggingface.co/datasets/cis-lmu/GlotCC-V1 +🐱 pipeline v3: https://github.com/cisnlp/GlotCC + +More details? Stay tuned for our upcoming paper. +More data? In the next version, we plan to include additional snapshots of CommonCrawl. + +Limitation: Due to the lower frequency of low-resource languages compared to others, there are sometimes only a few sentences available for very low-resource languages. However, the data volume for English in this version stands at 750GB, and the top 200 languages still have a strong presence in our data (see plot attached; we write the index for every 20 languages, meaning the 10th index is the 200th language). +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61bf84c8ca59d6d196a1b4e8/czI1nI2D3_S03yK3eMcdS.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61bf84c8ca59d6d196a1b4e8/-Pegk25kv1xHpsNQOTocf.jpeg'}]",[],"[{'reaction': '👍', 'users': ['kargaranamir', 'osanseviero', 'eliebak', 'yjernite', 'ayymen', 'Jakh0103'], 'count': 6}]",2024-06-01 18:53:22,2024-06-01 18:53:22.222,[],/posts/kargaranamir/236711741382850,1433,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg,211.0,Bram Vanroy,BramVanroy,832731542304549,"[{'type': 'text', 'value': ""The InstructGPT paper mentions that they insert 10% pretraining data during SFT, which they find improves the effect of PPO (IIUC). Has anyone else done later ablations on this? 
I've only seen the inverse suggested, mixing in SFT data during pretraining."", 'raw': ""The InstructGPT paper mentions that they insert 10% pretraining data during SFT, which they find improves the effect of PPO (IIUC). Has anyone else done later ablations on this? I've only seen the inverse suggested, mixing in SFT data during pretraining.""}]","The InstructGPT paper mentions that they insert 10% pretraining data during SFT, which they find improves the effect of PPO (IIUC). Has anyone else done later ablations on this? I've only seen the inverse suggested, mixing in SFT data during pretraining.",[],[],"[{'reaction': '👀', 'users': ['Tonic', 'osanseviero', 'mootje'], 'count': 3}]",2024-06-01 17:59:22,2024-06-03 18:49:15.013,"[{'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '5f0c746619cb630495b814fd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1594651707950-noauth.jpeg', 'fullname': 'Lewis Tunstall', 'name': 'lewtun', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1028, 'isFollowing': False}]",/posts/BramVanroy/832731542304549,2221,,2 +/avatars/fadf0d7169222c94b635859a196c38ef.svg,36.0,Mohamed Salama,Salama1429,845263544496775,"[{'type': 'text', 'value': '📺 Introducing the YouTube-Commons Dataset 📺', 'raw': '📺 Introducing the YouTube-Commons Dataset 📺'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Overview: The YouTube Commons Dataset is a comprehensive collection of 30 billion words from 15,112,121 original and automatically translated transcripts, drawn from 2,063,066 videos on YouTube.', 'raw': '🌐 Overview: The YouTube Commons Dataset is a comprehensive collection of 30 billion words from 15,112,121 original and automatically translated transcripts, drawn from 2,063,066 videos on YouTube.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 License: All videos are shared under the CC-BY license, with the majority (71%) in English.', 'raw': '🔗 License: All videos are shared under the CC-BY license, with the majority (71%) in English.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 Applications: This dataset is ideal for training powerful AI models for converting speech to text (ASR) and translation models.', 'raw': '🤖 Applications: This dataset is ideal for training powerful AI models for converting speech to text (ASR) and translation models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Utilization: The text can be used for model training and is republishable for reproducibility purposes.', 'raw': '📊 Utilization: The text can be used for model training and is republishable for reproducibility purposes.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤝 Collaboration: This dataset is the result of a collaboration between state start-up LANGU:IA, the French Ministry of Culture, and DINUM. 
It will be expanded in the coming months.', 'raw': '🤝 Collaboration: This dataset is the result of a collaboration between state start-up LANGU:IA, the French Ministry of Culture, and DINUM. It will be expanded in the coming months.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Explore the dataset here: ', 'raw': '🔗 Explore the dataset here: '}, {'type': 'link', 'href': 'https://lnkd.in/d_paWKFE', 'raw': 'https://lnkd.in/d_paWKFE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#YouTubeCommons #AIResearch #MachineLearning #OpenData #ArtificialIntelligence #NLP #Dataset #TechCollaboration #Innovation #DigitalTransformation', 'raw': '#YouTubeCommons #AIResearch #MachineLearning #OpenData #ArtificialIntelligence #NLP #Dataset #TechCollaboration #Innovation #DigitalTransformation'}]","📺 Introducing the YouTube-Commons Dataset 📺 + +🌐 Overview: The YouTube Commons Dataset is a comprehensive collection of 30 billion words from 15,112,121 original and automatically translated transcripts, drawn from 2,063,066 videos on YouTube. + +🔗 License: All videos are shared under the CC-BY license, with the majority (71%) in English. + +🤖 Applications: This dataset is ideal for training powerful AI models for converting speech to text (ASR) and translation models. + +📊 Utilization: The text can be used for model training and is republishable for reproducibility purposes. + +🤝 Collaboration: This dataset is the result of a collaboration between state start-up LANGU:IA, the French Ministry of Culture, and DINUM. It will be expanded in the coming months. + +🔗 Explore the dataset here: https://lnkd.in/d_paWKFE + +#YouTubeCommons #AIResearch #MachineLearning #OpenData #ArtificialIntelligence #NLP #Dataset #TechCollaboration #Innovation #DigitalTransformation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f50684ea5bd6b1abc2096a/XZ9DP6md0B6BT9dWY8mD6.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['Salama1429', 'Ramikan-BR', 'KingNish', 'NHLOCAL', 'Tonic', 'jshuadvd', 'Vladuzz', 'rreed-pha', 'KIRNEILL', 'thomwolf', 'hiauiarau', 'dingo-actual'], 'count': 12}, {'reaction': '❤️', 'users': ['Salama1429', 'Ramikan-BR', 'Tonic', 'thesven', 'jshuadvd', 'dillfrescott', 'thomwolf', 'hikmalab'], 'count': 8}, {'reaction': '🚀', 'users': ['Salama1429', 'Ramikan-BR', 'Tonic', 'thesven', 'jshuadvd', 'GPT007', 'thomwolf'], 'count': 7}, {'reaction': '😎', 'users': ['Salama1429', 'Ramikan-BR', 'Tonic', 'jshuadvd', 'GPT007', 'thomwolf'], 'count': 6}, {'reaction': '🧠', 'users': ['Salama1429', 'Ramikan-BR', 'Tonic', 'jshuadvd', 'nataliaElv'], 'count': 5}, {'reaction': '🤗', 'users': ['Salama1429', 'Ramikan-BR', 'Tonic'], 'count': 3}, {'reaction': '👍', 'users': ['Salama1429', 'Tonic'], 'count': 2}, {'reaction': '🤝', 'users': ['Salama1429', 'Tonic'], 'count': 2}, {'reaction': '👀', 'users': ['Salama1429', 'Tonic'], 'count': 2}]",2024-06-01 13:11:43,2024-06-01 13:13:14.865,[],/posts/Salama1429/845263544496775,2593,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6401c8c9f98fbc64bcd7dca1/MOSgc_mPbfUZ-354osy1v.png,241.0,FBL,fblgit,862353567134059,"[{'type': 'text', 'value': 'Introducing UNA-ThePitbull Series', 'raw': 'Introducing UNA-ThePitbull Series'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We are happy to announce the release of our latest model UNA-ThePitbull, the most powerful model below 70B in the industry. 
In this new generation, inspired by our previous Beagle series, we curated a model that balances EQ and IQ nicely. It was trained with some of the latest datasets including:', 'raw': 'We are happy to announce the release of our latest model UNA-ThePitbull, the most powerful model below 70B in the industry. In this new generation, inspired by our previous Beagle series, we curated a model that balances EQ and IQ nicely. It was trained with some of the latest datasets including:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Replete-AI/code_bagel_hermes-2.5', 'raw': '* Replete-AI/code_bagel_hermes-2.5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* mlabonne/orpo-dpo-mix-40k', 'raw': '* mlabonne/orpo-dpo-mix-40k'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* jondurbin/py-dpo-v0.1', 'raw': '* jondurbin/py-dpo-v0.1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Available in the hub ', 'raw': 'Available in the hub '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'fblgit/UNA-ThePitbull-21.4B-v2'}, 'url': 'https://huggingface.co/fblgit/UNA-ThePitbull-21.4B-v2', 'raw': 'https://huggingface.co/fblgit/UNA-ThePitbull-21.4B-v2'}, {'type': 'text', 'value': ' and you can grab Quant versions sponsored by ', 'raw': ' and you can grab Quant versions sponsored by '}, {'type': 'mention', 'user': 'bartowski', 'raw': '@bartowski'}, {'type': 'text', 'value': ' at ', 'raw': ' at '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'bartowski/UNA-ThePitbull-21.4B-v2-GGUF'}, 'url': 'https://huggingface.co/bartowski/UNA-ThePitbull-21.4B-v2-GGUF', 'raw': 'https://huggingface.co/bartowski/UNA-ThePitbull-21.4B-v2-GGUF'}, {'type': 'text', 'value': ' fully compatible with Ollama, llama.cpp, etc.', 'raw': ' fully compatible with Ollama, llama.cpp, etc.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'UNA', 'raw': 'UNA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this case we tried something new by alternating uniformity across layers of both MLP & Attention, reducing computational requirements while keeping a highly performant result.', 'raw': 'In this case we tried something new by alternating uniformity across layers of both MLP & Attention, reducing computational requirements while keeping a highly performant result.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We trained him under these terms:', 'raw': 'We trained him under these terms:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* ThePitbull-v1 as base: SFT maxLR 1e-4 minLR 5e-5 for 1 Epoch', 'raw': '* ThePitbull-v1 as base: SFT maxLR 1e-4 minLR 5e-5 for 1 Epoch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* DPO maxLR 1e-4 minLR 5e-5 for 1 Epoch', 'raw': '* DPO maxLR 1e-4 minLR 5e-5 for 1 Epoch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can continue the training by merely using 5e-5 maxLR and 0 warmup steps; it should minimize catastrophic forgetting of the model.', 'raw': 'You can continue the training by merely using 5e-5 maxLR and 0 warmup steps; it should minimize catastrophic forgetting of the model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Remember if you do so, please include a Pitbull picture on your model and cite :) Have fun!', 'raw': 'Remember if you do so, please include a Pitbull picture on your model and cite :) Have fun!'}, {'type': 'new_line', 'raw': '\n'}]","Introducing UNA-ThePitbull Series + +We are happy to announce the release of our latest model UNA-ThePitbull, the most powerful model below 70B in the industry. In this new generation, inspired by our previous Beagle series, we curated a model that balances EQ and IQ nicely. It was trained with some of the latest datasets including: +* Replete-AI/code_bagel_hermes-2.5 +* mlabonne/orpo-dpo-mix-40k +* jondurbin/py-dpo-v0.1 +Available in the hub https://huggingface.co/fblgit/UNA-ThePitbull-21.4B-v2 and you can grab Quant versions sponsored by @bartowski at https://huggingface.co/bartowski/UNA-ThePitbull-21.4B-v2-GGUF fully compatible with Ollama, llama.cpp, etc. + +UNA +In this case we tried something new by alternating uniformity across layers of both MLP & Attention, reducing computational requirements while keeping a highly performant result. + +We trained him under these terms: +* ThePitbull-v1 as base: SFT maxLR 1e-4 minLR 5e-5 for 1 Epoch +* DPO maxLR 1e-4 minLR 5e-5 for 1 Epoch +You can continue the training by merely using 5e-5 maxLR and 0 warmup steps; it should minimize catastrophic forgetting of the model. + +Remember if you do so, please include a Pitbull picture on your model and cite :) Have fun! +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6401c8c9f98fbc64bcd7dca1/VNV9edpvMYm00Y3aA_pdu.png'}]","[{'_id': '6435718aaaef013d1aec3b8b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6435718aaaef013d1aec3b8b/XKf-8MA47tjVAM6SCX0MP.jpeg', 'fullname': 'Bartowski', 'name': 'bartowski', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7068}]","[{'reaction': '🔥', 'users': ['bartowski', 'Tonic', 'Yhyu13'], 'count': 3}, {'reaction': '🚀', 'users': ['thesven'], 'count': 1}]",2024-06-01 12:52:55,2024-06-01 12:52:55.094,[],/posts/fblgit/862353567134059,2614,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,372309956389715,"[{'type': 'text', 'value': 'ChemLLM Multi-Modal version will be coming soon!', 'raw': 'ChemLLM Multi-Modal version will be coming soon!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also Weights and Datasets!', 'raw': 'Also Weights and Datasets!'}]","ChemLLM Multi-Modal version will be coming soon!
+Also Weights and Datasets!",[],[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'osanseviero', 'Ramikan-BR', 'taufiqdp', 'whitebill', 'Tonic', 'Hev832', 'mrmuminov', 'Yhyu13', 'louisbrulenaudet'], 'count': 10}, {'reaction': '🚀', 'users': ['Ramikan-BR', 'Tonic', 'Yhyu13', 'eljanmahammadli'], 'count': 4}, {'reaction': '👀', 'users': ['Ramikan-BR', 'Tonic', 'Yhyu13'], 'count': 3}]",2024-06-01 10:06:38,2024-06-02 09:14:14.699,"[{'_id': '62a3bb1cd0d8c2c2169f0b88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg', 'fullname': 'Joseph [open/acc] Pollack', 'name': 'Tonic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 415, 'isFollowing': False}, {'_id': '664a23d0cf5fd472d77ad034', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/PKzQSYUHCgFFVYPeeNGYy.png', 'fullname': 'DynaTech Systems', 'name': 'DynaTechSystems', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/di-zhang-fdu/372309956389715,2077,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,375095993392947,"[{'type': 'text', 'value': 'Remember Gemini, GPT-4o, all being true multimodal models 🌟.', 'raw': 'Remember Gemini, GPT-4o, all being true multimodal models 🌟.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Now we have a paper 📄 describing an architecture that might achieve that!', 'raw': 'Now we have a paper 📄 describing an architecture that might achieve that!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Uni-MoE: a native multimodal, Unified Mixture of Experts (MoE) architecture 🏗️.', 'raw': 'Uni-MoE: a native multimodal, Unified Mixture of Experts (MoE) architecture 🏗️.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Uni-MoE integrates various modalities (text 📝, image 🖼️, audio 🎵, video 📹, speech 🗣️) using modality-specific encoders and connectors for a cohesive multimodal understanding.', 'raw': 'Uni-MoE integrates various modalities (text 📝, image 🖼️, audio 🎵, video 📹, speech 🗣️) using modality-specific encoders and connectors for a cohesive multimodal understanding.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Training Strategy:', 'raw': 'Training Strategy:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ Training cross-modality alignment with diverse connectors 🔄.', 'raw': '1️⃣ Training cross-modality alignment with diverse connectors 🔄.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Training modality-specific experts using cross-modality instruction data 📊.', 'raw': '2️⃣ Training modality-specific experts using cross-modality instruction data 📊.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣Tuning the Uni-MoE framework with Low-Rank Adaptation (LoRA) on mixed multimodal data 🔧.', 'raw': '3️⃣Tuning the Uni-MoE framework with Low-Rank Adaptation (LoRA) on mixed multimodal data 🔧.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Technical Details:', 'raw': 'Technical Details:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 
'value': 'Modality-Specific Encoders: CLIP for images 🖼️, Whisper for speech 🗣️, BEATs for audio 🎵.', 'raw': 'Modality-Specific Encoders: CLIP for images 🖼️, Whisper for speech 🗣️, BEATs for audio 🎵.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MoE-Based Blocks: Shared self-attention layers, feed-forward networks (FFN) based experts, and sparse routers for token-level expertise allocation 🚀.', 'raw': 'MoE-Based Blocks: Shared self-attention layers, feed-forward networks (FFN) based experts, and sparse routers for token-level expertise allocation 🚀.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Efficient Training: Utilizes LoRA for fine-tuning pre-trained experts and self-attention layers 🛠️.', 'raw': 'Efficient Training: Utilizes LoRA for fine-tuning pre-trained experts and self-attention layers 🛠️.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Uni-MoE outperforms traditional dense models on benchmarks like A-OKVQA, OK-VQA, VQAv2, MMBench, RACE-Audio, and English High School Listening Test 🏆.', 'raw': 'Uni-MoE outperforms traditional dense models on benchmarks like A-OKVQA, OK-VQA, VQAv2, MMBench, RACE-Audio, and English High School Listening Test 🏆.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The code is open-sourced as well: ', 'raw': 'The code is open-sourced as well: '}, {'type': 'link', 'href': 'https://github.com/HITsz-TMG/UMOE-Scaling-Unified-Multimodal-LLMs/tree/master/Uni_MoE_v2', 'raw': 'https://github.com/HITsz-TMG/UMOE-Scaling-Unified-Multimodal-LLMs/tree/master/Uni_MoE_v2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.11273'}, 'url': 'https://huggingface.co/papers/2405.11273', 'raw': 'https://huggingface.co/papers/2405.11273', 'label': 'Uni-MoE: Scaling Unified Multimodal LLMs with Mixture of Experts (2405.11273)'}]","Remember Gemini, GPT-4o, all being true multimodal models 🌟. + +Now we have a paper 📄 describing an architecture that might achieve that! + +Uni-MoE: a native multimodal, Unified Mixture of Experts (MoE) architecture 🏗️. + +Uni-MoE integrates various modalities (text 📝, image 🖼️, audio 🎵, video 📹, speech 🗣️) using modality-specific encoders and connectors for a cohesive multimodal understanding. + +Training Strategy: +1️⃣ Training cross-modality alignment with diverse connectors 🔄. +2️⃣ Training modality-specific experts using cross-modality instruction data 📊. +3️⃣Tuning the Uni-MoE framework with Low-Rank Adaptation (LoRA) on mixed multimodal data 🔧. + +Technical Details: + +Modality-Specific Encoders: CLIP for images 🖼️, Whisper for speech 🗣️, BEATs for audio 🎵. + +MoE-Based Blocks: Shared self-attention layers, feed-forward networks (FFN) based experts, and sparse routers for token-level expertise allocation 🚀. + +Efficient Training: Utilizes LoRA for fine-tuning pre-trained experts and self-attention layers 🛠️. + +Uni-MoE outperforms traditional dense models on benchmarks like A-OKVQA, OK-VQA, VQAv2, MMBench, RACE-Audio, and English High School Listening Test 🏆. 
+ +The code is open-sourced as well: https://github.com/HITsz-TMG/UMOE-Scaling-Unified-Multimodal-LLMs/tree/master/Uni_MoE_v2 + +Paper: https://huggingface.co/papers/2405.11273","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/EdjQkmLpB8o0DRaVWb-k3.jpeg'}]",[],"[{'reaction': '🤗', 'users': ['Lumpen1', 'alielfilali01', 'gilangf3000'], 'count': 3}, {'reaction': '🚀', 'users': ['osanseviero', 'alielfilali01'], 'count': 2}, {'reaction': '🧠', 'users': ['alielfilali01'], 'count': 1}]",2024-05-31 21:42:25,2024-05-31 21:42:25.714,[],/posts/singhsidhukuldeep/375095993392947,1529,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65bb837dbfb878f46c77de4c/PKyQ_-wTNH1Hyv5HxhWdX.jpeg,1957.0,Prithiv Sakthi,prithivMLmods,733013042582862,"[{'type': 'text', 'value': '#Newer / Current Version', 'raw': '#Newer / Current Version'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚨Huggingface APK Update v0.0.4🚨', 'raw': '🚨Huggingface APK Update v0.0.4🚨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Fixed Pinch to Zoom Update.', 'raw': '1. Fixed Pinch to Zoom Update.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Swipe Gestures.', 'raw': '2. Swipe Gestures.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Fixed Auto Rotate.', 'raw': '3. Fixed Auto Rotate.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Updated app Identifiers.', 'raw': '4. Updated app Identifiers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Download the app now!!', 'raw': 'Download the app now!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚨Huggingface v0.0.4 Download,', 'raw': '🚨Huggingface v0.0.4 Download,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⬇️Link : ', 'raw': '⬇️Link : '}, {'type': 'link', 'href': 'https://drive.google.com/file/d/1xEiH7LMdP14fBG-xDuSqKje5TRLV1PuS/view?usp=sharing', 'raw': 'https://drive.google.com/file/d/1xEiH7LMdP14fBG-xDuSqKje5TRLV1PuS/view?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Like 👍Share 🚀 Follow 🌠', 'raw': 'Like 👍Share 🚀 Follow 🌠'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","#Newer / Current Version +🚨Huggingface APK Update v0.0.4🚨 +1. Fixed Pinch to Zoom Update. +2. Swipe Gestures. +3. Fixed Auto Rotate. +4. Updated app Identifiers. + +Download the app now!! +🚨Huggingface v0.0.4 Download, +⬇️Link : https://drive.google.com/file/d/1xEiH7LMdP14fBG-xDuSqKje5TRLV1PuS/view?usp=sharing + +Like 👍Share 🚀 Follow 🌠 + +",[],[],"[{'reaction': '➕', 'users': ['prithivMLmods', 'Speedk4011', 'belalmasood5', 'Emilllion', 'ill1ogick', 'TinoBhavoindian', 'aryanabdolahi'], 'count': 7}]",2024-05-31 17:30:06,2024-06-01 18:02:53.687,[],/posts/prithivMLmods/733013042582862,7779,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg,1222.0,Nishith Jain,KingNish,786350827380996,"[{'type': 'text', 'value': 'I am pleased to announce 2 amazing AI demos:', 'raw': 'I am pleased to announce 2 amazing AI demos:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Chat with Google Agent - This includes three AI models that allow you to converse with an AI, which provides answers by searching Google.', 'raw': '1.
Chat with Google Agent - This includes three AI models that allow you to converse with an AI, which provides answers by searching Google.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo Link: ', 'raw': 'Demo Link: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'poscye/google-go'}, 'url': 'https://huggingface.co/spaces/poscye/google-go', 'raw': 'https://huggingface.co/spaces/poscye/google-go'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. HelpingAI 9B - A model that surpassed all top AIs with the highest EQ benchmark score of 89.23. It specializes in understanding human emotions and responding in human style.', 'raw': '2. HelpingAI 9B - A model that surpassed all top AIs with the highest EQ benchmark score of 89.23. It specializes in understanding human emotions and responding in human style.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo Link: ', 'raw': 'Demo Link: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/Abhaykoul/HelpingAI-9B', 'raw': 'https://huggingface.co/spaces/Abhaykoul/HelpingAI-9B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model Link: ', 'raw': 'Model Link: '}, {'type': 'link', 'href': 'https://huggingface.co/OEvortex/HelpingAI-9B', 'raw': 'https://huggingface.co/OEvortex/HelpingAI-9B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog Link: ', 'raw': 'Blog Link: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/KingNish/helpingai-9b', 'raw': 'https://huggingface.co/blog/KingNish/helpingai-9b'}]","I am pleased to announce 2 amazing AI demos: + +1. Chat with Google Agent - This includes three AI models that allow you to converse with an AI, which provides answers by searching Google. +Demo Link: https://huggingface.co/spaces/poscye/google-go + +2. HelpingAI 9B - A model that surpassed all top AIs with the highest EQ benchmark score of 89.23. It specializes in understanding human emotions and responding in human style. 
+Demo Link: https://huggingface.co/spaces/Abhaykoul/HelpingAI-9B +Model Link: https://huggingface.co/OEvortex/HelpingAI-9B +Blog Link: https://huggingface.co/blog/KingNish/helpingai-9b",[],[],"[{'reaction': '👍', 'users': ['Lumpen1', 'pabloce', 'ijohn07', 'Rusky1234', 'osanseviero', 'Hev832', 'rreed-pha', 'Lou-stic'], 'count': 8}, {'reaction': '❤️', 'users': ['pabloce', 'aceeee', 'PifPaf', 'louisbrulenaudet'], 'count': 4}, {'reaction': '🚀', 'users': ['pabloce'], 'count': 1}, {'reaction': '😎', 'users': ['pabloce'], 'count': 1}, {'reaction': '🔥', 'users': ['pabloce'], 'count': 1}]",2024-05-31 15:32:43,2024-06-02 09:46:26.266,"[{'_id': '64e46dcdb78bc92221aa5c1b', 'avatarUrl': '/avatars/1a2ca2ddd0b7b3c4fe93ccfc89f97752.svg', 'fullname': 'Shruti Dhange', 'name': 'shrutidhange', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '6612aedf09f16e7347dfa7e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg', 'fullname': 'Nishith Jain', 'name': 'KingNish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1222, 'isFollowing': False}]",/posts/KingNish/786350827380996,6489,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1658166666371-noauth.png,44.0,Stepanov,Ihor,885684618092437,"[{'type': 'text', 'value': 'We are pleased to announce the new line of universal token classification models 🔥', 'raw': 'We are pleased to announce the new line of universal token classification models 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'knowledgator/universal-token-classification-65a3a5d3f266d20b2e05c34d'}, 'url': 'https://huggingface.co/collections/knowledgator/universal-token-classification-65a3a5d3f266d20b2e05c34d', 'raw': 'https://huggingface.co/collections/knowledgator/universal-token-classification-65a3a5d3f266d20b2e05c34d'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It can perform various information extraction tasks by analysing input prompts and recognizing parts of texts that satisfy prompts. In comparison with the first version, the second one is more general and can be recognised as entities, whole sentences, and even paragraphs.', 'raw': 'It can perform various information extraction tasks by analysing input prompts and recognizing parts of texts that satisfy prompts. 
In comparison with the first version, the second one is more general and can recognise entities, whole sentences, and even paragraphs.', 'raw': 'It can perform various information extraction tasks by analysing input prompts and recognizing parts of texts that satisfy prompts. In comparison with the first version, the second one is more general and can recognise entities, whole sentences, and even paragraphs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The model can be used for the following tasks:', 'raw': 'The model can be used for the following tasks:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Named entity recognition (NER);', 'raw': '* Named entity recognition (NER);'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Open information extraction;', 'raw': '* Open information extraction;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Question answering;', 'raw': '* Question answering;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Relation extraction;', 'raw': '* Relation extraction;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Coreference resolution;', 'raw': '* Coreference resolution;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Text cleaning;', 'raw': '* Text cleaning;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Summarization;', 'raw': '* Summarization;'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How to use:', 'raw': 'How to use:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'from utca.core import (\n AddData,\n RenameAttribute,\n Flush\n)\nfrom utca.implementation.predictors import (\n TokenSearcherPredictor, TokenSearcherPredictorConfig\n)\nfrom utca.implementation.tasks import (\n TokenSearcherNER,\n TokenSearcherNERPostprocessor,\n)\npredictor = TokenSearcherPredictor(\n TokenSearcherPredictorConfig(\n device=""cuda:0"",\n model=""knowledgator/UTC-DeBERTa-base-v2""\n )\n)\nner_task = TokenSearcherNER(\n predictor=predictor,\n postprocess=[TokenSearcherNERPostprocessor(\n threshold=0.5\n )]\n)\n\npipeline = ( \n AddData({""labels"": [""scientist"", ""university"", ""city""]}) \n | ner_task\n | Flush(keys=[""labels""])\n | RenameAttribute(""output"", ""entities"")\n)\nres = pipeline.run({\n ""text"": """"""Dr. Paul Hammond, a renowned neurologist at Johns Hopkins University, has recently published a paper in the prestigious journal ""Nature Neuroscience"". """"""\n})', 'raw': '```\nfrom utca.core import (\n AddData,\n RenameAttribute,\n Flush\n)\nfrom utca.implementation.predictors import (\n TokenSearcherPredictor, TokenSearcherPredictorConfig\n)\nfrom utca.implementation.tasks import (\n TokenSearcherNER,\n TokenSearcherNERPostprocessor,\n)\npredictor = TokenSearcherPredictor(\n TokenSearcherPredictorConfig(\n device=""cuda:0"",\n model=""knowledgator/UTC-DeBERTa-base-v2""\n )\n)\nner_task = TokenSearcherNER(\n predictor=predictor,\n postprocess=[TokenSearcherNERPostprocessor(\n threshold=0.5\n )]\n)\n\npipeline = ( \n AddData({""labels"": [""scientist"", ""university"", ""city""]}) \n | ner_task\n | Flush(keys=[""labels""])\n | RenameAttribute(""output"", ""entities"")\n)\nres = pipeline.run({\n ""text"": """"""Dr. Paul Hammond, a renowned neurologist at Johns Hopkins University, has recently published a paper in the prestigious journal ""Nature Neuroscience"". """"""\n})\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","We are pleased to announce the new line of universal token classification models 🔥 + +https://huggingface.co/collections/knowledgator/universal-token-classification-65a3a5d3f266d20b2e05c34d + +It can perform various information extraction tasks by analysing input prompts and recognizing parts of texts that satisfy prompts. In comparison with the first version, the second one is more general and can recognise entities, whole sentences, and even paragraphs. + +The model can be used for the following tasks: +* Named entity recognition (NER); +* Open information extraction; +* Question answering; +* Relation extraction; +* Coreference resolution; +* Text cleaning; +* Summarization; + +How to use: + +``` +from utca.core import ( + AddData, + RenameAttribute, + Flush +) +from utca.implementation.predictors import ( + TokenSearcherPredictor, TokenSearcherPredictorConfig +) +from utca.implementation.tasks import ( + TokenSearcherNER, + TokenSearcherNERPostprocessor, +) +predictor = TokenSearcherPredictor( + TokenSearcherPredictorConfig( + device=""cuda:0"", + model=""knowledgator/UTC-DeBERTa-base-v2"" + ) +) +ner_task = TokenSearcherNER( + predictor=predictor, + postprocess=[TokenSearcherNERPostprocessor( + threshold=0.5 + )] +) + +pipeline = ( + AddData({""labels"": [""scientist"", ""university"", ""city""]}) + | ner_task + | Flush(keys=[""labels""]) + | RenameAttribute(""output"", ""entities"") +) +res = pipeline.run({ + ""text"": """"""Dr. Paul Hammond, a renowned neurologist at Johns Hopkins University, has recently published a paper in the prestigious journal ""Nature Neuroscience"". """""" +}) +``` + +",[],[],"[{'reaction': '👍', 'users': ['KingNish', 'GPT007', 'la-min', 'osanseviero', 'Joseph717171', 'Ramikan-BR', 'a9i'], 'count': 7}, {'reaction': '🔥', 'users': ['Citaman', 'cansa', 'Ihor', 'Ramikan-BR'], 'count': 4}, {'reaction': '👀', 'users': ['Ramikan-BR'], 'count': 1}]",2024-05-31 14:53:35,2024-05-31 14:54:15.915,[],/posts/Ihor/885684618092437,1903,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,505714323175985,"[{'type': 'text', 'value': '𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗮𝗻 𝗮𝗴𝗲𝗻𝘁𝗶𝗰 𝘄𝗼𝗿𝗸𝗳𝗹𝗼𝘄 𝘂𝘀𝗲 𝗶𝘁𝘀 𝗟𝗟𝗠 𝗲𝗻𝗴𝗶𝗻𝗲 𝘁𝗼 𝘀𝗼𝗹𝘃𝗲 𝘁𝗮𝘀𝗸𝘀?', 'raw': '𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗮𝗻 𝗮𝗴𝗲𝗻𝘁𝗶𝗰 𝘄𝗼𝗿𝗸𝗳𝗹𝗼𝘄 𝘂𝘀𝗲 𝗶𝘁𝘀 𝗟𝗟𝗠 𝗲𝗻𝗴𝗶𝗻𝗲 𝘁𝗼 𝘀𝗼𝗹𝘃𝗲 𝘁𝗮𝘀𝗸𝘀?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ I made my first ever 𝘮𝘢𝘯𝘪𝘮 video to show just that:', 'raw': '➡️ I made my first ever 𝘮𝘢𝘯𝘪𝘮 video to show just that:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗪𝗮𝘁𝗰𝗵 𝗯𝗲𝗹𝗼𝘄 𝗵𝗼𝘄 𝗮 𝗥𝗲𝗮𝗰𝘁 𝗔𝗴𝗲𝗻𝘁 𝘀𝗼𝗹𝘃𝗲𝘀 𝗮 𝘀𝗶𝗺𝗽𝗹𝗲 𝘁𝗮𝘀𝗸, by leveraging its memory to iterate on previous actions! 🎬👇', 'raw': '𝗪𝗮𝘁𝗰𝗵 𝗯𝗲𝗹𝗼𝘄 𝗵𝗼𝘄 𝗮 𝗥𝗲𝗮𝗰𝘁 𝗔𝗴𝗲𝗻𝘁 𝘀𝗼𝗹𝘃𝗲𝘀 𝗮 𝘀𝗶𝗺𝗽𝗹𝗲 𝘁𝗮𝘀𝗸, by leveraging its memory to iterate on previous actions! 🎬👇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read our blog post on Agents: ', 'raw': 'Read our blog post on Agents: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/agents', 'raw': 'https://huggingface.co/blog/agents'}]","𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗮𝗻 𝗮𝗴𝗲𝗻𝘁𝗶𝗰 𝘄𝗼𝗿𝗸𝗳𝗹𝗼𝘄 𝘂𝘀𝗲 𝗶𝘁𝘀 𝗟𝗟𝗠 𝗲𝗻𝗴𝗶𝗻𝗲 𝘁𝗼 𝘀𝗼𝗹𝘃𝗲 𝘁𝗮𝘀𝗸𝘀?
+ +➡️ I made my first ever 𝘮𝘢𝘯𝘪𝘮 video to show just that: + +𝗪𝗮𝘁𝗰𝗵 𝗯𝗲𝗹𝗼𝘄 𝗵𝗼𝘄 𝗮 𝗥𝗲𝗮𝗰𝘁 𝗔𝗴𝗲𝗻𝘁 𝘀𝗼𝗹𝘃𝗲𝘀 𝗮 𝘀𝗶𝗺𝗽𝗹𝗲 𝘁𝗮𝘀𝗸, by leveraging its memory to iterate on previous actions! 🎬👇 + +Read our blog post on Agents: https://huggingface.co/blog/agents","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/i-F4wkBjgWQiei3WWvCJG.mp4'}]",[],"[{'reaction': '🔥', 'users': ['lunarflu', 'nbroad', 'GPT007', 'umair894', 'Hev832', 'not-lain'], 'count': 6}]",2024-05-31 12:59:31,2024-06-02 01:08:01.641,"[{'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}]",/posts/m-ric/505714323175985,1847,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65c992424936ab38ecf706b0/aq7vuHFPO1S93fwJk0Cuq.jpeg,221.0,Jim Lai,grimjim,432123745536924,"[{'type': 'text', 'value': 'I propose ""merge densification"", a style of merger which attempts to transfer the benefits of a denser model to a base model. The model weight in this case is 0.02, which is atypically small for mergers, but high compared to the learning rate used during training. In this case, the expectation is more creative text-generation. More details below:', 'raw': 'I propose ""merge densification"", a style of merger which attempts to transfer the benefits of a denser model to a base model. The model weight in this case is 0.02, which is atypically small for mergers, but high compared to the learning rate used during training. In this case, the expectation is more creative text-generation. More details below:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'grimjim/kunoichi-lemon-royale-v3-32K-7B'}, 'url': 'https://huggingface.co/grimjim/kunoichi-lemon-royale-v3-32K-7B', 'raw': 'https://huggingface.co/grimjim/kunoichi-lemon-royale-v3-32K-7B'}]","I propose ""merge densification"", a style of merger which attempts to transfer the benefits of a denser model to a base model. The model weight in this case is 0.02, which is atypically small for mergers, but high compared to the learning rate used during training. In this case, the expectation is more creative text-generation. More details below: +https://huggingface.co/grimjim/kunoichi-lemon-royale-v3-32K-7B",[],[],"[{'reaction': '👀', 'users': ['osanseviero', 'louisbrulenaudet', 'victor', 'SerialKicked'], 'count': 4}, {'reaction': '👍', 'users': ['SerialKicked'], 'count': 1}]",2024-05-28 04:09:54,2024-05-28 18:13:34.200,[],/posts/grimjim/432123745536924,1693,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,561208015517169,"[{'type': 'text', 'value': 'Remember stacking in ensemble ML? 🤔', 'raw': 'Remember stacking in ensemble ML? 🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What happens if you do the reverse of that but with LLMs? 🤯', 'raw': 'What happens if you do the reverse of that but with LLMs? 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Basically, MoE created by merging multiple models (instead of being pre-trained like Mixtral)? 
🧠', 'raw': 'Basically, MoE created by merging multiple models (instead of being pre-trained like Mixtral)? 🧠'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Frankenstein MoE! (not an official name) 🧟\u200d♂️', 'raw': 'Frankenstein MoE! (not an official name) 🧟\u200d♂️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""That's the new Kraken architecture! 🐙"", 'raw': ""That's the new Kraken architecture! 🐙""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It uses a sequence classification model to route inputs to the most suitable language model based on the input's characteristics. 🚦"", 'raw': ""It uses a sequence classification model to route inputs to the most suitable language model based on the input's characteristics. 🚦""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Yup, multiple full-fledged LLMs are loaded into memory, and then a classification layer decides who gets to generate an output! 🎰', 'raw': 'Yup, multiple full-fledged LLMs are loaded into memory, and then a classification layer decides who gets to generate an output! 🎰'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tell me you have too many GPUs without telling me you have too many GPUs! 🖥️🔥', 'raw': 'Tell me you have too many GPUs without telling me you have too many GPUs! 🖥️🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Jokes aside, extremely fascinating research but I don't understand why this can't just be a big model with multiple LORA adapters, that can be decided on the fly? 🤷\u200d♂️"", 'raw': ""Jokes aside, extremely fascinating research but I don't understand why this can't just be a big model with multiple LORA adapters, that can be decided on the fly? 🤷\u200d♂️""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'cognitivecomputations/Kraken'}, 'url': 'https://huggingface.co/cognitivecomputations/Kraken', 'raw': 'https://huggingface.co/cognitivecomputations/Kraken'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Github: ', 'raw': 'Github: '}, {'type': 'link', 'href': 'https://github.com/cognitivecomputations/kraken', 'raw': 'https://github.com/cognitivecomputations/kraken'}, {'type': 'new_line', 'raw': '\n'}]","Remember stacking in ensemble ML? 🤔 + +What happens if you do the reverse of that but with LLMs? 🤯 + +Basically, MoE created by merging multiple models (instead of being pre-trained like Mixtral)? 🧠 + +Frankenstein MoE! (not an official name) 🧟‍♂️ + +That's the new Kraken architecture! 🐙 + +It uses a sequence classification model to route inputs to the most suitable language model based on the input's characteristics. 🚦 + +Yup, multiple full-fledged LLMs are loaded into memory, and then a classification layer decides who gets to generate an output! 🎰 + +Tell me you have too many GPUs without telling me you have too many GPUs! 🖥️🔥 + +Jokes aside, extremely fascinating research but I don't understand why this can't just be a big model with multiple LORA adapters, that can be decided on the fly? 
🤷‍♂️ + +Model: https://huggingface.co/cognitivecomputations/Kraken +Github: https://github.com/cognitivecomputations/kraken +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/aQCt2NhFnp_PTRrOKVPrR.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['NickyNicky'], 'count': 1}]",2024-05-27 20:32:54,2024-05-29 11:27:15.806,"[{'_id': '64da9e4bdafcf7595956abcf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64da9e4bdafcf7595956abcf/Wf8o2zTUEghptxk-ao_43.jpeg', 'fullname': 'Alex Sherstinsky', 'name': 'alexsherstinsky', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7, 'isFollowing': False}, {'_id': '635efe2b398ff343c4fa209b', 'avatarUrl': '/avatars/53ebfcab852efd849a848a26dc65751c.svg', 'fullname': 'elsatch', 'name': 'elsatch', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}]",/posts/singhsidhukuldeep/561208015517169,1052,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,505150496684783,"[{'type': 'text', 'value': ""If you're part of the Journalists on Hugging Face community, did you know you can receive notifications on ongoing discussions? "", 'raw': ""If you're part of the Journalists on Hugging Face community, did you know you can receive notifications on ongoing discussions? ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ""Repo discussions"" for repo discussions you\'re participating in or mentioned in', 'raw': '- ""Repo discussions"" for repo discussions you\'re participating in or mentioned in'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ""New activity on watched orgs/users"" for repo discussions & posts from users & orgs', 'raw': '- ""New activity on watched orgs/users"" for repo discussions & posts from users & orgs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""you're watching"", 'raw': ""you're watching""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Activate them here: ', 'raw': 'Activate them here: '}, {'type': 'link', 'href': 'https://huggingface.co/settings/notifications', 'raw': 'https://huggingface.co/settings/notifications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Join the community: https://huggingface.co/JournalistsonHF', 'raw': 'Join the community: https://huggingface.co/JournalistsonHF'}]","If you're part of the Journalists on Hugging Face community, did you know you can receive notifications on ongoing discussions? 
+ +- ""Repo discussions"" for repo discussions you're participating in or mentioned in +- ""New activity on watched orgs/users"" for repo discussions & posts from users & orgs +you're watching + +Activate them here: https://huggingface.co/settings/notifications + +Join the community: https://huggingface.co/JournalistsonHF","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/YyGHsAwrNtRFtXQ4GVC5a.png'}]",[],"[{'reaction': '👀', 'users': ['osanseviero', 'lunarflu'], 'count': 2}]",2024-05-27 18:43:30,2024-05-27 18:43:30.838,[],/posts/fdaudens/505150496684783,967,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6459fa0f5b3111fbe83286e1/E6Buqu8Wd9WmIHKOCZXCc.jpeg,235.0,Louis Brulé Naudet,louisbrulenaudet,607293668883891,"[{'type': 'text', 'value': ""I've just open sourced RAGoon, a small utility I use to integrate knowledge from the web into LLM inference based on Groq speed and pure Google search performance ⚡"", 'raw': ""I've just open sourced RAGoon, a small utility I use to integrate knowledge from the web into LLM inference based on Groq speed and pure Google search performance ⚡""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'RAGoon is a Python library available on PyPI that aims to improve the performance of language models by providing contextually relevant information through retrieval-based querying, parallel web scraping, and data augmentation techniques. 
It offers an integration of various APIs (OpenAI, Groq), enabling users to retrieve information from the web, enrich it with domain-specific knowledge, and feed it to language models for more informed responses.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'python', 'code': 'from groq import Groq\n# from openai import OpenAI\nfrom ragoon import RAGoon\n\n# Initialize RAGoon instance\nragoon = RAGoon(\n google_api_key=""your_google_api_key"",\n google_cx=""your_google_cx"",\n completion_client=Groq(api_key=""your_groq_api_key"")\n)\n\n# Search and get results\nquery = ""I want to do a left join in python polars""\nresults = ragoon.search(\n query=query,\n completion_model=""Llama3-70b-8192"",\n)\n\n# Print list of results\nprint(results)', 'raw': '```python\nfrom groq import Groq\n# from openai import OpenAI\nfrom ragoon import RAGoon\n\n# Initialize RAGoon instance\nragoon = RAGoon(\n google_api_key=""your_google_api_key"",\n google_cx=""your_google_cx"",\n completion_client=Groq(api_key=""your_groq_api_key"")\n)\n\n# Search and get results\nquery = ""I want to do a left join in python polars""\nresults = ragoon.search(\n query=query,\n completion_model=""Llama3-70b-8192"",\n)\n\n# Print list of results\nprint(results)\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For the time being, this project remains simple, but can easily be integrated into a RAG pipeline.', 'raw': 'For the time being, this project remains simple, but can easily be integrated into a RAG pipeline.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to GitHub : ', 'raw': 'Link to GitHub : '}, {'type': 'link', 'href': 'https://github.com/louisbrulenaudet/ragoon', 'raw': 'https://github.com/louisbrulenaudet/ragoon'}]","I've just open sourced RAGoon, a small utility I use to integrate knowledge from the web into LLM inference based on Groq speed and pure Google search performance ⚡ + +RAGoon is a Python library available on PyPI that aims to improve the performance of language models by providing contextually relevant information through retrieval-based querying, parallel web scraping, and data augmentation techniques. It offers an integration of various APIs (OpenAI, Groq), enabling users to retrieve information from the web, enrich it with domain-specific knowledge, and feed it to language models for more informed responses. +```python +from groq import Groq +# from openai import OpenAI +from ragoon import RAGoon + +# Initialize RAGoon instance +ragoon = RAGoon( + google_api_key=""your_google_api_key"", + google_cx=""your_google_cx"", + completion_client=Groq(api_key=""your_groq_api_key"") +) + +# Search and get results +query = ""I want to do a left join in python polars"" +results = ragoon.search( + query=query, + completion_model=""Llama3-70b-8192"", +) + +# Print list of results +print(results) +``` +For the time being, this project remains simple, but can easily be integrated into a RAG pipeline. 
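To illustrate that last point, here is one way the search results could feed a RAG step. This is only a sketch under assumptions: `ragoon` and the Groq `client` are the objects from the snippet above, and `answer_with_web_context` is a hypothetical helper, not part of RAGoon's API:
```python
# Illustrative sketch only: wiring RAGoon's web search output into a
# RAG-style prompt. `answer_with_web_context` is hypothetical;
# `ragoon` and the Groq `client` come from the snippet above.
def answer_with_web_context(ragoon, client, question: str) -> str:
    # 1. Retrieve web-grounded snippets for the question
    snippets = ragoon.search(query=question, completion_model="Llama3-70b-8192")
    context = "\n\n".join(str(s) for s in snippets)

    # 2. Feed the augmented prompt to the LLM
    prompt = f"Answer using only this context:\n{context}\n\nQuestion: {question}"
    chat = client.chat.completions.create(
        model="Llama3-70b-8192",
        messages=[{"role": "user", "content": prompt}],
    )
    return chat.choices[0].message.content
```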
+ +Link to GitHub : https://github.com/louisbrulenaudet/ragoon","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6459fa0f5b3111fbe83286e1/ItloGN4cE0rSGfQxj8utl.png'}]",[],"[{'reaction': '🚀', 'users': ['KvrParaskevi', 'lunarflu', 'not-lain'], 'count': 3}]",2024-05-27 18:33:23,2024-05-27 18:33:23.872,[],/posts/louisbrulenaudet/607293668883891,986,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,378045093988402,"[{'type': 'text', 'value': '🚀🎭🌟 New Research Alert - InstructAvatar (Avatars Collection)! 🌟🎭🚀', 'raw': '🚀🎭🌟 New Research Alert - InstructAvatar (Avatars Collection)! 🌟🎭🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: InstructAvatar: Text-Guided Emotion and Motion Control for Avatar Generation 🔝', 'raw': '📄 Title: InstructAvatar: Text-Guided Emotion and Motion Control for Avatar Generation 🔝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: InstructAvatar is a novel method for generating emotionally expressive 2D avatars using text-guided instructions, offering improved emotion control, lip-sync quality, and naturalness. It uses a two-branch diffusion-based generator to predict avatars based on both audio and text input.', 'raw': '📝 Description: InstructAvatar is a novel method for generating emotionally expressive 2D avatars using text-guided instructions, offering improved emotion control, lip-sync quality, and naturalness. It uses a two-branch diffusion-based generator to predict avatars based on both audio and text input.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Yuchi Wang et al.', 'raw': '👥 Authors: Yuchi Wang et al.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.15758'}, 'url': 'https://huggingface.co/papers/2405.15758', 'raw': 'https://huggingface.co/papers/2405.15758', 'label': 'InstructAvatar: Text-Guided Emotion and Motion Control for Avatar\n Generation (2405.15758)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Page: ', 'raw': '🌐 Github Page: '}, {'type': 'link', 'href': 'https://wangyuchi369.github.io/InstructAvatar/', 'raw': 'https://wangyuchi369.github.io/InstructAvatar/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Repository: ', 'raw': '📁 Repository: '}, {'type': 'link', 'href': 'https://github.com/wangyuchi369/InstructAvatar', 'raw': 'https://github.com/wangyuchi369/InstructAvatar'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 
Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #InstructAvatar #AvatarGeneration #EmotionControl #FacialMotion #LipSynchronization #NaturalLanguageInterface #DiffusionBasedGenerator #TextGuidedInstructions #2DAvatars #VideoSynthesis #Interactivity #ComputerGraphics #DeepLearning #ComputerVision #Innovation', 'raw': '🔍 Keywords: #InstructAvatar #AvatarGeneration #EmotionControl #FacialMotion #LipSynchronization #NaturalLanguageInterface #DiffusionBasedGenerator #TextGuidedInstructions #2DAvatars #VideoSynthesis #Interactivity #ComputerGraphics #DeepLearning #ComputerVision #Innovation'}]","🚀🎭🌟 New Research Alert - InstructAvatar (Avatars Collection)! 🌟🎭🚀 +📄 Title: InstructAvatar: Text-Guided Emotion and Motion Control for Avatar Generation 🔝 + +📝 Description: InstructAvatar is a novel method for generating emotionally expressive 2D avatars using text-guided instructions, offering improved emotion control, lip-sync quality, and naturalness. It uses a two-branch diffusion-based generator to predict avatars based on both audio and text input. + +👥 Authors: Yuchi Wang et al. + +📄 Paper: https://huggingface.co/papers/2405.15758 + +🌐 Github Page: https://wangyuchi369.github.io/InstructAvatar/ +📁 Repository: https://github.com/wangyuchi369/InstructAvatar + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +🔍 Keywords: #InstructAvatar #AvatarGeneration #EmotionControl #FacialMotion #LipSynchronization #NaturalLanguageInterface #DiffusionBasedGenerator #TextGuidedInstructions #2DAvatars #VideoSynthesis #Interactivity #ComputerGraphics #DeepLearning #ComputerVision #Innovation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/YYnmWDkaIkVF2Dm3UL7cO.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/f4l4KMb8_2IJ2NzVM8YZX.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/DCCpbVbGa_zC1aqADiH0p.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/sKtkIZfOCe2EKQVTsLqpn.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/tNNsJR6SvleDiD-2PvMcn.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Zt5kGu-PFg0KJ3skufmET.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/kMibihTNATfGtyN8pBL9D.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/3ULRLG1zG6S8Ubr5RRwn0.png'}, {'type': 'image', 'url': 
'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/azrJi6Cm04z7Zzq6EOGL2.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/9AcopNhySZjMVefevyHSy.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/tNuCPMG6NeKxugLQajLNP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/lpsIIouJLd6-MpFN598Xj.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/U0HmCQA_mjDRGI4P-iwWb.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}]","[{'reaction': '🔥', 'users': ['DmitryRyumin', 'fdaudens', 'KvrParaskevi', 'ambientocclusion', 'YuchiWang', 'osanseviero', 'CharileYou', 'seriousran', 'katoernest', 'HugoArraes'], 'count': 10}, {'reaction': '👍', 'users': ['umair894', 'seriousran', 'NeuralKartMocker'], 'count': 3}, {'reaction': '🚀', 'users': ['CharileYou'], 'count': 1}, {'reaction': '🤗', 'users': ['CharileYou'], 'count': 1}, {'reaction': '👀', 'users': ['Tonic'], 'count': 1}]",2024-05-27 16:59:22,2024-05-27 16:59:22.330,[],/posts/DmitryRyumin/378045093988402,859,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/659f000b83abded48e190901/EgA8d-2GDYF41NqWe_rCT.png,227.0,Noa Roggendorff,nroggendorff,768982923455327,"[{'type': 'text', 'value': 'Celebrating 30 likes!! ', 'raw': 'Celebrating 30 likes!! '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'nroggendorff/epicrealismxl'}, 'url': 'https://huggingface.co/spaces/nroggendorff/epicrealismxl', 'raw': 'https://huggingface.co/spaces/nroggendorff/epicrealismxl'}]",Celebrating 30 likes!! https://huggingface.co/spaces/nroggendorff/epicrealismxl,[],[],"[{'reaction': '🔥', 'users': ['Alexandro14', 'KingNish', 'osanseviero', 'lunarflu'], 'count': 4}]",2024-05-27 16:42:52,2024-05-27 16:43:31.150,[],/posts/nroggendorff/768982923455327,721,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,822144949711868,"[{'type': 'text', 'value': '𝙒𝙧𝙞𝙩𝙞𝙣𝙜 𝙩𝙤𝙤𝙡 𝙘𝙖𝙡𝙡𝙨 𝙞𝙣 𝙘𝙤𝙙𝙚 𝙟𝙪𝙨𝙩 𝙬𝙤𝙧𝙠𝙨 𝙗𝙚𝙩𝙩𝙚𝙧 𝙩𝙝𝙖𝙣 𝙅𝙎𝙊𝙉 💪', 'raw': '𝙒𝙧𝙞𝙩𝙞𝙣𝙜 𝙩𝙤𝙤𝙡 𝙘𝙖𝙡𝙡𝙨 𝙞𝙣 𝙘𝙤𝙙𝙚 𝙟𝙪𝙨𝙩 𝙬𝙤𝙧𝙠𝙨 𝙗𝙚𝙩𝙩𝙚𝙧 𝙩𝙝𝙖𝙣 𝙅𝙎𝙊𝙉 💪'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I was really happy to learn today by ', 'raw': 'I was really happy to learn today by '}, {'type': 'mention', 'user': 'sergeipetrov', 'raw': '@sergeipetrov'}, {'type': 'text', 'value': ' that paper 𝘌𝘹𝘦𝘤𝘶𝘵𝘢𝘣𝘭𝘦 𝘊𝘰𝘥𝘦 𝘈𝘤𝘵𝘪𝘰𝘯𝘴 𝘌𝘭𝘪𝘤𝘪𝘵 𝘉𝘦𝘵𝘵𝘦𝘳 𝘓𝘓𝘔 𝘈𝘨𝘦𝘯𝘵𝘴 was accepted at ICLR 2024! ', 'raw': ' that paper 𝘌𝘹𝘦𝘤𝘶𝘵𝘢𝘣𝘭𝘦 𝘊𝘰𝘥𝘦 𝘈𝘤𝘵𝘪𝘰𝘯𝘴 𝘌𝘭𝘪𝘤𝘪𝘵 𝘉𝘦𝘵𝘵𝘦𝘳 𝘓𝘓𝘔 𝘈𝘨𝘦𝘯𝘵𝘴 was accepted at ICLR 2024! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As a reminder, an agent is a system in which you embed a LLM engine, to let it call tools.', 'raw': 'As a reminder, an agent is a system in which you embed a LLM engine, to let it call tools.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""These tools are meant like an IronMan suit, to supplement the LLM in areas that it isn't good at."", 'raw': ""These tools are meant like an IronMan suit, to supplement the LLM in areas that it isn't good at.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧑\u200d💻 For instance your friendly LLM may be terrible at calculating powers of floating numbers (""What is X ^0.2947 ?""), so it should use a calculator.', 'raw': '🧑\u200d💻 For instance your friendly LLM may be terrible at calculating powers of floating numbers (""What is X ^0.2947 ?""), so it should use a calculator.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔎It may be terrible at knowing precise facts (""What was the date of the Golden Bull?"") so it should use a web browser.', 'raw': '🔎It may be terrible at knowing precise facts (""What was the date of the Golden Bull?"") so it should use a web browser.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So the agent system will prompt an agent with ""Now you can use these tools: calculator, search,...""', 'raw': 'So the agent system will prompt an agent with ""Now you can use these tools: calculator, search,...""'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But 𝙝𝙤𝙬 𝙨𝙝𝙤𝙪𝙡𝙙 𝙩𝙝𝙚 𝙖𝙜𝙚𝙣𝙩 𝙚𝙭𝙥𝙧𝙚𝙨𝙨 𝙞𝙩𝙨 𝙖𝙘𝙩𝙞𝙤𝙣𝙨?', 'raw': 'But 𝙝𝙤𝙬 𝙨𝙝𝙤𝙪𝙡𝙙 𝙩𝙝𝙚 𝙖𝙜𝙚𝙣𝙩 𝙚𝙭𝙥𝙧𝙚𝙨𝙨 𝙞𝙩𝙨 𝙖𝙘𝙩𝙞𝙤𝙣𝙨?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All well known frameworks let agents write their actions as JSON strings.', 'raw': 'All well known frameworks let agents write their actions as JSON strings.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We 𝗽𝗿𝗲𝗳𝗲𝗿𝗿𝗲𝗱 𝘁𝗼 𝗴𝗼 𝘄𝗶𝘁𝗵 𝗳𝗼𝗿𝗺𝘂𝗹𝗮𝘁𝗶𝗻𝗴 𝗮𝗰𝘁𝗶𝗼𝗻𝘀 𝗶𝗻 𝗖𝗼𝗱𝗲, 𝘄𝗵𝗶𝗰𝗵 𝗶𝘀 𝗺𝘂𝗰𝗵 𝗺𝗼𝗿𝗲 𝘃𝗲𝗿𝘀𝗮𝘁𝗶𝗹𝗲 𝗮𝗻𝗱 𝗰𝗼𝗻𝗰𝗶𝘀𝗲, 𝗮𝗻𝗱 𝗮𝗹𝗹𝗼𝘄𝘀 𝘁𝗼 𝗰𝗵𝗮𝗶𝗻 𝗮𝗰𝘁𝗶𝗼𝗻𝘀 𝘀𝗲𝗮𝗺𝗹𝗲𝘀𝘀𝗹𝘆: see the picture attached for an example where Code formulation really shines.', 'raw': 'We 𝗽𝗿𝗲𝗳𝗲𝗿𝗿𝗲𝗱 𝘁𝗼 𝗴𝗼 𝘄𝗶𝘁𝗵 𝗳𝗼𝗿𝗺𝘂𝗹𝗮𝘁𝗶𝗻𝗴 𝗮𝗰𝘁𝗶𝗼𝗻𝘀 𝗶𝗻 𝗖𝗼𝗱𝗲, 𝘄𝗵𝗶𝗰𝗵 𝗶𝘀 𝗺𝘂𝗰𝗵 𝗺𝗼𝗿𝗲 𝘃𝗲𝗿𝘀𝗮𝘁𝗶𝗹𝗲 𝗮𝗻𝗱 𝗰𝗼𝗻𝗰𝗶𝘀𝗲, 𝗮𝗻𝗱 𝗮𝗹𝗹𝗼𝘄𝘀 𝘁𝗼 𝗰𝗵𝗮𝗶𝗻 𝗮𝗰𝘁𝗶𝗼𝗻𝘀 𝘀𝗲𝗮𝗺𝗹𝗲𝘀𝘀𝗹𝘆: see the picture attached for an example where Code formulation really shines.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And the paper confirms our choice: researchers show that 𝗰𝗼𝗺𝗽𝗮𝗿𝗲𝗱 𝘁𝗼 𝗝𝗦𝗢𝗡 𝗼𝗿 𝗽𝗹𝗮𝗶𝗻 𝘁𝗲𝘅𝘁, 𝗖𝗼𝗱𝗲 𝗶𝘀 𝗯𝗲𝘁𝘁𝗲𝗿 𝗯𝗼𝘁𝗵 𝗶𝗻 𝗰𝗼𝗻𝗰𝗶𝘀𝗲𝗻𝗲𝘀𝘀 𝗮𝗻𝗱 𝗽𝗲𝗿𝗳𝗼𝗿𝗺𝗮𝗻𝗰𝗲:', 'raw': 'And the paper confirms our choice: researchers show that 𝗰𝗼𝗺𝗽𝗮𝗿𝗲𝗱 𝘁𝗼 𝗝𝗦𝗢𝗡 𝗼𝗿 𝗽𝗹𝗮𝗶𝗻 𝘁𝗲𝘅𝘁, 𝗖𝗼𝗱𝗲 𝗶𝘀 𝗯𝗲𝘁𝘁𝗲𝗿 𝗯𝗼𝘁𝗵 𝗶𝗻 𝗰𝗼𝗻𝗰𝗶𝘀𝗲𝗻𝗲𝘀𝘀 𝗮𝗻𝗱 𝗽𝗲𝗿𝗳𝗼𝗿𝗺𝗮𝗻𝗰𝗲:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➤ Up to 30% fewer steps for the same actions (much more concise)', 'raw': '➤ Up to 30% fewer steps for the same actions (much more concise)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➤ Up to 20% higher performance on benchmarks', 'raw': '➤ Up to 20% higher performance on benchmarks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And we find additional benefits, for instance a natural handling of 
variables.', 'raw': 'And we find additional benefits, for instance a natural handling of variables.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the paper here 📖 ', 'raw': 'Read the paper here 📖 '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.01030'}, 'url': 'https://huggingface.co/papers/2402.01030', 'raw': 'https://huggingface.co/papers/2402.01030', 'label': 'Executable Code Actions Elicit Better LLM Agents (2402.01030)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Get your ReactCodeAgent running with our Agents framework! 👉 ', 'raw': 'Get your ReactCodeAgent running with our Agents framework! 👉 '}, {'type': 'link', 'href': 'https://huggingface.co/learn/cookbook/agents', 'raw': 'https://huggingface.co/learn/cookbook/agents'}]","𝙒𝙧𝙞𝙩𝙞𝙣𝙜 𝙩𝙤𝙤𝙡 𝙘𝙖𝙡𝙡𝙨 𝙞𝙣 𝙘𝙤𝙙𝙚 𝙟𝙪𝙨𝙩 𝙬𝙤𝙧𝙠𝙨 𝙗𝙚𝙩𝙩𝙚𝙧 𝙩𝙝𝙖𝙣 𝙅𝙎𝙊𝙉 💪 + +I was really happy to learn today by @sergeipetrov that paper 𝘌𝘹𝘦𝘤𝘶𝘵𝘢𝘣𝘭𝘦 𝘊𝘰𝘥𝘦 𝘈𝘤𝘵𝘪𝘰𝘯𝘴 𝘌𝘭𝘪𝘤𝘪𝘵 𝘉𝘦𝘵𝘵𝘦𝘳 𝘓𝘓𝘔 𝘈𝘨𝘦𝘯𝘵𝘴 was accepted at ICLR 2024! + +As a reminder, an agent is a system in which you embed a LLM engine, to let it call tools. + +These tools are meant like an IronMan suit, to supplement the LLM in areas that it isn't good at. +🧑‍💻 For instance your friendly LLM may be terrible at calculating powers of floating numbers (""What is X ^0.2947 ?""), so it should use a calculator. +🔎It may be terrible at knowing precise facts (""What was the date of the Golden Bull?"") so it should use a web browser. + +So the agent system will prompt an agent with ""Now you can use these tools: calculator, search,..."" + +But 𝙝𝙤𝙬 𝙨𝙝𝙤𝙪𝙡𝙙 𝙩𝙝𝙚 𝙖𝙜𝙚𝙣𝙩 𝙚𝙭𝙥𝙧𝙚𝙨𝙨 𝙞𝙩𝙨 𝙖𝙘𝙩𝙞𝙤𝙣𝙨? + +All well known frameworks let agents write their actions as JSON strings. + +We 𝗽𝗿𝗲𝗳𝗲𝗿𝗿𝗲𝗱 𝘁𝗼 𝗴𝗼 𝘄𝗶𝘁𝗵 𝗳𝗼𝗿𝗺𝘂𝗹𝗮𝘁𝗶𝗻𝗴 𝗮𝗰𝘁𝗶𝗼𝗻𝘀 𝗶𝗻 𝗖𝗼𝗱𝗲, 𝘄𝗵𝗶𝗰𝗵 𝗶𝘀 𝗺𝘂𝗰𝗵 𝗺𝗼𝗿𝗲 𝘃𝗲𝗿𝘀𝗮𝘁𝗶𝗹𝗲 𝗮𝗻𝗱 𝗰𝗼𝗻𝗰𝗶𝘀𝗲, 𝗮𝗻𝗱 𝗮𝗹𝗹𝗼𝘄𝘀 𝘁𝗼 𝗰𝗵𝗮𝗶𝗻 𝗮𝗰𝘁𝗶𝗼𝗻𝘀 𝘀𝗲𝗮𝗺𝗹𝗲𝘀𝘀𝗹𝘆: see the picture attached for an example where Code formulation really shines. + +And the paper confirms our choice: researchers show that 𝗰𝗼𝗺𝗽𝗮𝗿𝗲𝗱 𝘁𝗼 𝗝𝗦𝗢𝗡 𝗼𝗿 𝗽𝗹𝗮𝗶𝗻 𝘁𝗲𝘅𝘁, 𝗖𝗼𝗱𝗲 𝗶𝘀 𝗯𝗲𝘁𝘁𝗲𝗿 𝗯𝗼𝘁𝗵 𝗶𝗻 𝗰𝗼𝗻𝗰𝗶𝘀𝗲𝗻𝗲𝘀𝘀 𝗮𝗻𝗱 𝗽𝗲𝗿𝗳𝗼𝗿𝗺𝗮𝗻𝗰𝗲: +➤ Up to 30% fewer steps for the same actions (much more concise) +➤ Up to 20% higher performance on benchmarks + +And we find additional benefits, for instance a natural handling of variables. + +Read the paper here 📖 https://huggingface.co/papers/2402.01030 +Get your ReactCodeAgent running with our Agents framework! 
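To make the contrast concrete, here is a toy illustration (deliberately simplified, and not the Agents framework API): the same calculator tool, first called through a JSON action, then chained freely inside a code action.
```python
# Toy illustration of JSON-formulated vs code-formulated actions.
# This is NOT the Agents framework API, just the underlying idea.
import json
import math

def calculator(expr: str) -> float:
    """Evaluate a math expression with no builtins exposed."""
    return eval(expr, {"__builtins__": {}}, vars(math))

# JSON formulation: one tool call per step; the intermediate result
# must round-trip through the LLM before the next call.
action = json.loads('{"tool": "calculator", "arguments": "2 ** 0.2947"}')
step_one = calculator(action["arguments"])

# Code formulation: the agent emits a snippet that chains calls
# and keeps intermediate results in ordinary variables.
code_action = """
x = calculator('2 ** 0.2947')
result = calculator(f'{x} * 10')
"""
scope = {"calculator": calculator}
exec(code_action, scope)
print(step_one, scope["result"])
```
The code formulation keeps intermediate results in plain variables, which is the "natural handling of variables" mentioned above. The cookbook below walks through the real thing: 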
👉 https://huggingface.co/learn/cookbook/agents",[],"[{'_id': '6452927e3f80ad88c7794977', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/D78gS9F1gE6mwdbpyzT5K.jpeg', 'fullname': 'Sergei Petrov', 'name': 'sergeipetrov', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 51}]","[{'reaction': '🔥', 'users': ['Alexandro14', 'clem', 'Rybens'], 'count': 3}]",2024-05-27 16:41:17,2024-05-27 16:41:17.317,[],/posts/m-ric/822144949711868,842,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,733481799754673,"[{'type': 'text', 'value': 'We will be providing ZeroGPU grants (for Spaces inference) to those who want to fine-tune PaliGemma and build a Space 🔥', 'raw': 'We will be providing ZeroGPU grants (for Spaces inference) to those who want to fine-tune PaliGemma and build a Space 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can pick any dataset of your choice!', 'raw': 'You can pick any dataset of your choice!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Example code: ', 'raw': 'Example code: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1x_OEphRK0H97DqqxEyiMewqsTiLD_Xmi?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1x_OEphRK0H97DqqxEyiMewqsTiLD_Xmi?usp=sharing'}, {'type': 'text', 'value': ' (you can use a lower GPU with QLoRA)', 'raw': ' (you can use a lower GPU with QLoRA)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Datasets: ', 'raw': 'Datasets: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/datasets?task_categories=task_categories:text-to-image&sort=trending', 'raw': 'https://huggingface.co/datasets?task_categories=task_categories:text-to-image&sort=trending'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/datasets?task_categories=task_categories:image-to-text&sort=trending', 'raw': 'https://huggingface.co/datasets?task_categories=task_categories:image-to-text&sort=trending'}]","We will be providing ZeroGPU grants (for Spaces inference) to those who want to fine-tune PaliGemma and build a Space 🔥 + +You can pick any dataset of your choice! 
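If you want a feel for the setup before opening the notebook, the QLoRA skeleton looks roughly like this (a condensed sketch under assumptions; the linked Colab below is the reference and may differ in details):
```python
# Rough QLoRA setup for PaliGemma fine-tuning -- a condensed sketch;
# see the linked notebook for the full training loop.
import torch
from transformers import (AutoProcessor, BitsAndBytesConfig,
                          PaliGemmaForConditionalGeneration)
from peft import LoraConfig, get_peft_model

model_id = "google/paligemma-3b-pt-224"
bnb = BitsAndBytesConfig(load_in_4bit=True,
                         bnb_4bit_compute_dtype=torch.bfloat16)
model = PaliGemmaForConditionalGeneration.from_pretrained(
    model_id, quantization_config=bnb, device_map="auto")
processor = AutoProcessor.from_pretrained(model_id)

lora = LoraConfig(r=8,
                  target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
                  task_type="CAUSAL_LM")
model = get_peft_model(model, lora)
model.print_trainable_parameters()  # only the small LoRA layers train
```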
+ +Example code: https://colab.research.google.com/drive/1x_OEphRK0H97DqqxEyiMewqsTiLD_Xmi?usp=sharing (you can use a lower GPU with QLoRA) + +Datasets: +https://huggingface.co/datasets?task_categories=task_categories:text-to-image&sort=trending +https://huggingface.co/datasets?task_categories=task_categories:image-to-text&sort=trending",[],[],"[{'reaction': '🔥', 'users': ['Tonic', 'rishabh063', 'KingNish', 'SixOpen', 'm-aliabbas1', 'ajibawa-2023', 'KvrParaskevi', 'seyf1elislam', 'clem', 'osanseviero', 'louisbrulenaudet', 'lunarflu', 'mattmdjaga'], 'count': 13}, {'reaction': '🚀', 'users': ['Tonic', 'thliang01', 'clem', 'osanseviero', 'lunarflu', 'Taylor658'], 'count': 6}]",2024-05-27 12:11:46,2024-05-29 06:28:13.729,"[{'_id': '63961f473d7eadea746d7c13', 'avatarUrl': '/avatars/13cba4d6338663c2fab1cb399b2db018.svg', 'fullname': 'Rishabh Jalan', 'name': 'rishabh063', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '6612aedf09f16e7347dfa7e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg', 'fullname': 'Nishith Jain', 'name': 'KingNish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1222, 'isFollowing': False}, {'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786, 'isFollowing': False}, {'_id': '6656b53443b29fab877cb42b', 'avatarUrl': '/avatars/70c24b26f8314ee9b1919a240a4e1af9.svg', 'fullname': 'lyz', 'name': 'kkkkkkelvin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/merve/733481799754673,1303,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/61868ce808aae0b5499a2a95/F6BA0anbsoY_Z7M1JrwOe.jpeg,6819.0,Sylvain Filoni,fffiloni,365623455051476,"[{'type': 'text', 'value': '🇫🇷 ', 'raw': '🇫🇷 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What impact is AI having on the film, audiovisual and video game industries? ', 'raw': 'What impact is AI having on the film, audiovisual and video game industries? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A forward-looking study for industry professionals ', 'raw': 'A forward-looking study for industry professionals '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '— CNC & BearingPoint | 09/04/2024', 'raw': '— CNC & BearingPoint | 09/04/2024'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'While Artificial Intelligence (AI) has long been used in the film, audiovisual and video game sectors, the new applications of generative AI are upending our view of what a machine is capable of and carry an unprecedented potential for transformation. 
The quality of their output is striking, and as a result they are fueling many debates, between expectations and apprehensions.', 'raw': 'While Artificial Intelligence (AI) has long been used in the film, audiovisual and video game sectors, the new applications of generative AI are upending our view of what a machine is capable of and carry an unprecedented potential for transformation. The quality of their output is striking, and as a result they are fueling many debates, between expectations and apprehensions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The CNC has therefore decided to launch a new AI Observatory in order to better understand how AI is used and its real impact on the image industry. As part of this Observatory, the CNC set out to draw up an initial overview by mapping the current or potential uses of AI at each stage of the creation and distribution of a work, identifying the associated opportunities and risks, notably in terms of professions and employment. This CNC / BearingPoint study presented its main findings on March 6, during the CNC day « Créer, produire, diffuser à l’heure de l’intelligence artificielle ».', 'raw': 'The CNC has therefore decided to launch a new AI Observatory in order to better understand how AI is used and its real impact on the image industry. As part of this Observatory, the CNC set out to draw up an initial overview by mapping the current or potential uses of AI at each stage of the creation and distribution of a work, identifying the associated opportunities and risks, notably in terms of professions and employment. This CNC / BearingPoint study presented its main findings on March 6, during the CNC day « Créer, produire, diffuser à l’heure de l’intelligence artificielle ».'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The CNC is publishing the expanded version of its mapping of AI uses in the film, audiovisual and video game industries.', 'raw': 'The CNC is publishing the expanded version of its mapping of AI uses in the film, audiovisual and video game industries.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to the full mapping: ', 'raw': 'Link to the full mapping: '}, {'type': 'link', 'href': 'https://www.cnc.fr/documents/36995/2097582/Cartographie+des+usages+IA_rapport+complet.pdf/96532829-747e-b85e-c74b-af313072cab7?t=1712309387891', 'raw': 'https://www.cnc.fr/documents/36995/2097582/Cartographie+des+usages+IA_rapport+complet.pdf/96532829-747e-b85e-c74b-af313072cab7?t=1712309387891'}, {'type': 'new_line', 'raw': '\n'}]","🇫🇷 +What impact is AI having on the film, audiovisual and video game industries? +A forward-looking study for industry professionals +— CNC & BearingPoint | 09/04/2024 + +While Artificial Intelligence (AI) has long been used in the film, audiovisual and video game sectors, the new applications of generative AI are upending our view of what a machine is capable of and carry an unprecedented potential for transformation. The quality of their output is striking, and as a result they are fueling many debates, between expectations and apprehensions. 
+ +The CNC has therefore decided to launch a new AI Observatory in order to better understand how AI is used and its real impact on the image industry. As part of this Observatory, the CNC set out to draw up an initial overview by mapping the current or potential uses of AI at each stage of the creation and distribution of a work, identifying the associated opportunities and risks, notably in terms of professions and employment. This CNC / BearingPoint study presented its main findings on March 6, during the CNC day « Créer, produire, diffuser à l’heure de l’intelligence artificielle ». + +The CNC is publishing the expanded version of its mapping of AI uses in the film, audiovisual and video game industries. + +Link to the full mapping: https://www.cnc.fr/documents/36995/2097582/Cartographie+des+usages+IA_rapport+complet.pdf/96532829-747e-b85e-c74b-af313072cab7?t=1712309387891 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/B3XH9u7lLYjiLrcd1sYq5.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/LLpLjWDUQY7iLrUV5MUcO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/xe5-Qw1tONFJeJSwiCOXN.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/NPZ85X7kYoo85YDcwJuXO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/dDRfsnGrdyS0chygJxMLo.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/KZAEFhBQdPPM5yn590ugd.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/wSetaKb49l6-QTvHks1ms.png'}]",[],"[{'reaction': '❤️', 'users': ['Cos-s', 'Ramikan-BR', 'TheoLvs', 'clefourrier', 'leosok', 'AI-FLAMMMMMMMMMMINGO', 'menorki', 'spel', 'OmbelineM'], 'count': 9}, {'reaction': '👀', 'users': ['Ramikan-BR'], 'count': 1}]",2024-05-27 10:14:01,2024-07-06 12:01:24.494,"[{'_id': '63e80664e02ee67e8e570ec4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63e80664e02ee67e8e570ec4/rGfRhywmjd_lbqfYzOEdd.png', 'fullname': 'EsKa', 'name': 'SerialKicked', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 19, 'isFollowing': False}, {'_id': '61868ce808aae0b5499a2a95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61868ce808aae0b5499a2a95/F6BA0anbsoY_Z7M1JrwOe.jpeg', 'fullname': 'Sylvain Filoni', 'name': 'fffiloni', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6819, 'isFollowing': False}, {'_id': '66892d5f087be104a4e25e71', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/rch-WVoMtb1lmpxvpneKY.png', 'fullname': 'Erwan Wahad Soumhi', 'name': 'AI-FLAMMMMMMMMMMINGO', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/fffiloni/365623455051476,19554,,4 +/avatars/34a6ca3ce3efbfc2de107c358d635752.svg,1.0,Yuan Liu,flashback29,136294804241050,"[{'type': 'text', 'value': 'Just subscribed to PRO monthly, but still got rate limited when making the inference API call 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ' const api_url = ""https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B"";\n // The Inference API expects the text under ""inputs""; extra flags go under ""options"".\n const payload = JSON.stringify({\n ""inputs"": input,\n ""options"": { ""wait_for_model"": true, ""use_gpu"": false }\n });\n\n var xmlHttp = new XMLHttpRequest();\n // Synchronous request; headers are set via setRequestHeader, and send() takes only the body.\n xmlHttp.open(""POST"", api_url, false);\n xmlHttp.setRequestHeader(""Authorization"", `Bearer ${API_TOKEN}`);\n xmlHttp.setRequestHeader(""Content-Type"", ""application/json"");\n xmlHttp.send(payload);\n return xmlHttp.responseText;', 'raw': '```\n const api_url = ""https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B"";\n // The Inference API expects the text under ""inputs""; extra flags go under ""options"".\n const payload = JSON.stringify({\n ""inputs"": input,\n ""options"": { ""wait_for_model"": true, ""use_gpu"": false }\n });\n\n var xmlHttp = new XMLHttpRequest();\n // Synchronous request; headers are set via setRequestHeader, and send() takes only the body.\n xmlHttp.open(""POST"", api_url, false);\n xmlHttp.setRequestHeader(""Authorization"", `Bearer ${API_TOKEN}`);\n xmlHttp.setRequestHeader(""Content-Type"", ""application/json"");\n xmlHttp.send(payload);\n return xmlHttp.responseText;\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Need some help', 'raw': 'Need some help'}]","Just subscribed to PRO monthly, but still got rate limited when making the inference API call + + +```
 const api_url = ""https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B"";
 // The Inference API expects the text under ""inputs""; extra flags go under ""options"".
 const payload = JSON.stringify({
 ""inputs"": input,
 ""options"": { ""wait_for_model"": true, ""use_gpu"": false }
 });

 var xmlHttp = new XMLHttpRequest();
 // Synchronous request; headers are set via setRequestHeader, and send() takes only the body.
 xmlHttp.open(""POST"", api_url, false);
 xmlHttp.setRequestHeader(""Authorization"", `Bearer ${API_TOKEN}`);
 xmlHttp.setRequestHeader(""Content-Type"", ""application/json"");
 xmlHttp.send(payload);
 return xmlHttp.responseText;
```
Need some help",[],[],[],2024-05-26 23:44:06,2024-05-28 07:59:53.290,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '66517cc42fbc9f8285272269', 'avatarUrl': '/avatars/34a6ca3ce3efbfc2de107c358d635752.svg', 'fullname': 'Yuan Liu', 'name': 'flashback29', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '5e2967b819407e3277369b95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1608285816082-5e2967b819407e3277369b95.png', 'fullname': 'Nicolas Patry', 'name': 'Narsil', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 209, 'isFollowing': False}]",/posts/flashback29/136294804241050,311,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/64b999a40b24527e9c25583a/xFHCewJdf5EGn8qDPypqy.jpeg,54.0,David Golchinfar,DavidGF,885841437422630,"[{'type': 'text', 'value': 'The kraken has awakened!', 'raw': 'The kraken has awakened!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A Game-Changer in LLM Flexibility and Performance!', 'raw': 'A Game-Changer in LLM Flexibility and Performance!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Over the past few weeks, VAGO solutions teamed up with 
Cognitive Computations and HyperSpace to develop a groundbreaking architecture that redefines flexibility in combining different LLMs into one model.', 'raw': 'Over the past few weeks, VAGO solutions teamed up with Cognitive Computations and HyperSpace to develop a groundbreaking architecture that redefines flexibility in combining different LLMs into one model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'fernandofernandes', 'raw': '@fernandofernandes'}, {'type': 'text', 'value': ', me, ', 'raw': ', me, '}, {'type': 'mention', 'user': 'Crystalcareai', 'raw': '@Crystalcareai'}, {'type': 'text', 'value': ', ', 'raw': ', '}, {'type': 'mention', 'user': 'ehartford', 'raw': '@ehartford'}, {'type': 'text', 'value': ' created the Kraken!', 'raw': ' created the Kraken!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' What Can It Do? 🐙 ', 'raw': ' What Can It Do? 🐙 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Versatile Architecture: Kraken allows the seamless combination of LLMs with varying sizes, quantizations, and model architectures. It currently supports quantizations in 4-bit, 8-bit, and AWQ, with more on the way. And it runs on Hugging Face Transformers 4.40+', 'raw': '✅ Versatile Architecture: Kraken allows the seamless combination of LLMs with varying sizes, quantizations, and model architectures. It currently supports quantizations in 4-bit, 8-bit, and AWQ, with more on the way. And it runs on Hugging Face Transformers 4.40+'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Kraken Router: Utilizing a custom sequence classification model with a context length of 32k tokens, the Kraken Router directs inputs to the most suitable Expert based on their characteristics.', 'raw': '✅ Kraken Router: Utilizing a custom sequence classification model with a context length of 32k tokens, the Kraken Router directs inputs to the most suitable Expert based on their characteristics.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Adaptability: Enhanced input formatting supports the model’s adaptability to diverse conversational contexts.', 'raw': '✅ Adaptability: Enhanced input formatting supports the model’s adaptability to diverse conversational contexts.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""✅ Extreme Versatility: Easily swap experts within Kraken for your specific use cases without retraining the entire model. 
For example, if you've built a Kraken for coding in Python, you can upgrade your Python model without retraining the router, or add a C# model by retraining the router."", 'raw': ""✅ Extreme Versatility: Easily swap experts within Kraken for your specific use cases without retraining the entire model. For example, if you've built a Kraken for coding in Python, you can upgrade your Python model without retraining the router, or add a C# model by retraining the router.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ Open Source Pipeline: We’re sharing the entire pipeline, including router creation, training, architecture setup, and Kraken inference, on JupyterNotebooks: ', 'raw': '✅ Open Source Pipeline: We’re sharing the entire pipeline, including router creation, training, architecture setup, and Kraken inference, on JupyterNotebooks: '}, {'type': 'link', 'href': 'https://github.com/cognitivecomputations/kraken', 'raw': 'https://github.com/cognitivecomputations/kraken'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kraken marks the beginning of an exciting new journey in #OpenSource LLM. Why? Because it empowers the open source community in accelerating the catch-up process to proprietary LLMs like #GPT and #Claude 🤩', 'raw': 'Kraken marks the beginning of an exciting new journey in #OpenSource LLM. Why? Because it empowers the open source community in accelerating the catch-up process to proprietary LLMs like #GPT and #Claude 🤩'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We proudly introduce the very first 2 Kraken models, which integrate top-tier LLM and multilingual capabilities: ', 'raw': 'We proudly introduce the very first 2 Kraken models, which integrate top-tier LLM and multilingual capabilities: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'cognitivecomputations/Kraken'}, 'url': 'https://huggingface.co/cognitivecomputations/Kraken', 'raw': 'https://huggingface.co/cognitivecomputations/Kraken'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'VAGOsolutions/Kraken-Multilingual'}, 'url': 'https://huggingface.co/VAGOsolutions/Kraken-Multilingual', 'raw': 'https://huggingface.co/VAGOsolutions/Kraken-Multilingual'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': "" Right now it's supported by the Hugging Face transformers library. Would love to see the integration into VLM and TGWI!"", 'raw': "" Right now it's supported by the Hugging Face transformers library. Would love to see the integration into VLM and TGWI!""}]","The kraken has awakened! +A Game-Changer in LLM Flexibility and Performance! + +Over the past few weeks, VAGO solutions teamed up with Cognitive Computations and HyperSpace to develop a groundbreaking architecture that redefines flexibility in combining different LLMs into one model. + +@fernandofernandes, me, @Crystalcareai, @ehartford created the Kraken! + + What Can It Do? 🐙 +✅ Versatile Architecture: Kraken allows the seamless combination of LLMs with varying sizes, quantizations, and model architectures. It currently supports quantizations in 4-bit, 8-bit, and AWQ, with more on the way. And it runs on Hugging Face Transformers 4.40+ + +✅ Kraken Router: Utilizing a custom sequence classification model with a context length of 32k tokens, the Kraken Router directs inputs to the most suitable Expert based on their characteristics. + +✅ Adaptability: Enhanced input formatting supports the model’s adaptability to diverse conversational contexts. 
+ +✅ Extreme Versatility: Easily swap experts within Kraken for your specific use cases without retraining the entire model. For example, if you've built a Kraken for coding in Python, you can upgrade your Python model without retraining the router, or add a C# model by retraining the router. + +✅ Open Source Pipeline: We’re sharing the entire pipeline, including router creation, training, architecture setup, and Kraken inference, on JupyterNotebooks: https://github.com/cognitivecomputations/kraken + +Kraken marks the beginning of an exciting new journey in #OpenSource LLM. Why? Because it empowers the open source community in accelerating the catch-up process to proprietary LLMs like #GPT and #Claude 🤩 + +We proudly introduce the very first 2 Kraken models, which integrate top-tier LLM and multilingual capabilities: +https://huggingface.co/cognitivecomputations/Kraken +https://huggingface.co/VAGOsolutions/Kraken-Multilingual + Right now it's supported by the Hugging Face transformers library. Would love to see the integration into VLM and TGWI!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64b999a40b24527e9c25583a/TMr1lg0mqRwDu0lrUCLEJ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64b999a40b24527e9c25583a/m2ep1SiWzrbsxz7tshUBP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64b999a40b24527e9c25583a/DdTFaPhhg3nl7vXIneLr_.png'}]","[{'_id': '658cfefb63d9c84928e94ad8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/658cfefb63d9c84928e94ad8/Wi_EsL_33XCCTe5LUVwDo.jpeg', 'fullname': 'Lucas Atkins', 'name': 'Crystalcareai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 171}, {'_id': '63111b2d88942700629f5771', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63111b2d88942700629f5771/u2a9y-yx6TG0N31OhMSHI.png', 'fullname': 'Eric Hartford', 'name': 'ehartford', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4406}, {'_id': '646e57a5cb6ea6e6b6df1ad4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646e57a5cb6ea6e6b6df1ad4/PlGhM2SUynFBUdYAylaZK.jpeg', 'fullname': 'Fernando Fernandes Neto', 'name': 'fernandofernandes', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 54}]","[{'reaction': '🔥', 'users': ['pabloce', 'agenkit', 'Johnz86', 'Crystalcareai', 'Locutusque', 'osanseviero', 'NotASI', 'seyf1elislam'], 'count': 8}, {'reaction': '👍', 'users': ['jlzhou'], 'count': 1}]",2024-05-22 15:07:07,2024-05-22 15:15:07.824,[],/posts/DavidGF/885841437422630,1684,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,115164605711086,"[{'type': 'text', 'value': 'we recently shipped fine-grained access tokens on Hugging Face Hub, which lets you create tokens with super specific permissions', 'raw': 'we recently shipped fine-grained access tokens on Hugging Face Hub, which lets you create tokens with super specific permissions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""for instance, if you want to collaborate with an external organization you don't want to use your write token since they can access everything you can access. 
instead you can set token access to repositories under that org only like below "", 'raw': ""for instance, if you want to collaborate with an external organization you don't want to use your write token since they can access everything you can access. instead you can set token access to repositories under that org only like below ""}, {'type': 'new_line', 'raw': '\n'}]","we recently shipped fine-grained access tokens on Hugging Face Hub, which lets you create tokens with super specific permissions + +for instance, if you want to collaborate with an external organization you don't want to use your write token since they can access everything you can access. instead you can set token access to repositories under that org only like below +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/TSd4qztw0mFWrDPeNtm3P.mp4'}]",[],"[{'reaction': '🔥', 'users': ['osanseviero', 'nisten', 'SixOpen', 'alielfilali01', 'Ramikan-BR'], 'count': 5}, {'reaction': '👍', 'users': ['anzorq', 'fffiloni', 'SixOpen', 'Ramikan-BR'], 'count': 4}]",2024-05-22 14:30:16,2024-05-22 14:30:16.855,[],/posts/merve/115164605711086,2014,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg,77.0,Kenneth Hamilton,ZennyKenny,639106598938394,"[{'type': 'text', 'value': 'Thanks to the incredible collaboration of 14 community annotators, ', 'raw': 'Thanks to the incredible collaboration of 14 community annotators, '}, {'type': 'mention', 'user': 'davanstrien', 'raw': '@davanstrien'}, {'type': 'text', 'value': ' of HF and ', 'raw': ' of HF and '}, {'type': 'mention', 'user': 'dvilasuero', 'raw': '@dvilasuero'}, {'type': 'text', 'value': ' et al. of Argilla, DIBT (', 'raw': ' et al. of Argilla, DIBT ('}, {'type': 'resource', 'resource': {'type': 'userOrOrg', 'id': 'DIBT'}, 'url': 'https://huggingface.co/DIBT', 'raw': 'https://huggingface.co/DIBT'}, {'type': 'text', 'value': ') is pleased to make available a Russian-language dataset of 500 of the best curated LLM prompts translated to Russian and available for use: ', 'raw': ') is pleased to make available a Russian-language dataset of 500 of the best curated LLM prompts translated to Russian and available for use: '}, {'type': 'link', 'href': 'https://huggingface.co/datasets/DIBT/MPEP_RUSSIAN', 'raw': 'https://huggingface.co/datasets/DIBT/MPEP_RUSSIAN'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More to come from the MPEP initiative! Interested in annotating or leading a language team? ', 'raw': 'More to come from the MPEP initiative! Interested in annotating or leading a language team? '}, {'type': 'link', 'href': 'https://github.com/huggingface/data-is-better-together/tree/main/prompt_translation', 'raw': 'https://github.com/huggingface/data-is-better-together/tree/main/prompt_translation'}]","Thanks to the incredible collaboration of 14 community annotators, @davanstrien of HF and @dvilasuero et al. of Argilla, DIBT (https://huggingface.co/DIBT) is pleased to make available a Russian-language dataset of 500 of the best curated LLM prompts translated to Russian and available for use: https://huggingface.co/datasets/DIBT/MPEP_RUSSIAN. + +More to come from the MPEP initiative! Interested in annotating or leading a language team? 
https://github.com/huggingface/data-is-better-together/tree/main/prompt_translation",[],"[{'_id': '60107b385ac3e86b3ea4fc34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg', 'fullname': 'Daniel van Strien', 'name': 'davanstrien', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 638}, {'_id': '60420dccc15e823a685f2b03', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg', 'fullname': 'Daniel Vila', 'name': 'dvilasuero', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 334}]","[{'reaction': '🔥', 'users': ['osanseviero', 'dvilasuero', 'davanstrien', 'gabrielmbmb'], 'count': 4}, {'reaction': '🤗', 'users': ['dvilasuero', 'davanstrien', 'gabrielmbmb'], 'count': 3}]",2024-05-22 14:28:11,2024-05-22 15:20:56.812,"[{'_id': '60420dccc15e823a685f2b03', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg', 'fullname': 'Daniel Vila', 'name': 'dvilasuero', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 334, 'isFollowing': False}, {'_id': '60107b385ac3e86b3ea4fc34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg', 'fullname': 'Daniel van Strien', 'name': 'davanstrien', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 638, 'isFollowing': False}]",/posts/ZennyKenny/639106598938394,1177,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/61b9df9b22e5b0fdd501a113/i2yTGbK7pFnw9YLwZ7elp.jpeg,4.0,Akhil B,hakunamatata1997,275857882254590,"[{'type': 'text', 'value': 'Can someone suggest a good open source vision model that performs well at OCR?', 'raw': 'Can someone suggest a good open source vision model that performs well at OCR?'}]",Can someone suggest a good open source vision model that performs well at OCR?,[],[],"[{'reaction': '👀', 'users': ['victor', 'ToluClassics'], 'count': 2}]",2024-05-22 14:17:54,2024-06-04 13:03:36.955,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786, 'isFollowing': False}, {'_id': '6115592ab9969269c5abc5c7', 'avatarUrl': '/avatars/a9c90fa059933e00c4e1e376ea3f3677.svg', 'fullname': 'Praveen Kaushik', 'name': 'PKaushik', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7, 'isFollowing': False}, {'_id': '63f706dfe94ed998c463ed66', 
'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63f706dfe94ed998c463ed66/tAGil2qiFNev6CfEEDseV.png', 'fullname': 'Cuiunbo', 'name': 'Cuiunbo', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 22, 'isFollowing': False}, {'_id': '61b9df9b22e5b0fdd501a113', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b9df9b22e5b0fdd501a113/i2yTGbK7pFnw9YLwZ7elp.jpeg', 'fullname': 'Akhil B', 'name': 'hakunamatata1997', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '62878fdc70af5d9106e3e892', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1653051419389-62878fdc70af5d9106e3e892.png', 'fullname': 'K S', 'name': 'MultiTrickFox', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}]",/posts/hakunamatata1997/275857882254590,1054,,12 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,622865326118065,"[{'type': 'text', 'value': 'Excited to share a new project to make journalists’ lives easier when gathering information!', 'raw': 'Excited to share a new project to make journalists’ lives easier when gathering information!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Collecting data like lists, URLs, etc., from websites is not always easy (and sometimes painful). Web scraping requires technical skills that only a handful of people in each newsroom have.', 'raw': 'Collecting data like lists, URLs, etc., from websites is not always easy (and sometimes painful). Web scraping requires technical skills that only a handful of people in each newsroom have.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I recently stumbled upon ', 'raw': 'I recently stumbled upon '}, {'type': 'mention', 'user': 'scrapegraphai', 'raw': '@scrapegraphai'}, {'type': 'text', 'value': ', a scraper that does the heavy lifting with AI for the user with a simple prompt in natural language. I asked them if they could integrate the Hugging Face Hub to use open-source models and created a no-code, easy-to-use interface on Gradio.', 'raw': ', a scraper that does the heavy lifting with AI for the user with a simple prompt in natural language. I asked them if they could integrate the Hugging Face Hub to use open-source models and created a no-code, easy-to-use interface on Gradio.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can then save time and focus on storytelling!', 'raw': 'You can then save time and focus on storytelling!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔧 How It Works', 'raw': '🔧 How It Works'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Input Your Prompt and Source URL', 'raw': '1. Input Your Prompt and Source URL'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Click ‘Scrape and Summarize’', 'raw': '2. Click ‘Scrape and Summarize’'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Receive Summarized Results', 'raw': '3. 
Receive Summarized Results'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👩\u200d💻 Get Involved!', 'raw': '👩\u200d💻 Get Involved!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is just the first version of the tool, and it’s pretty basic. I’ve uploaded it to the Journalists on Hugging Face community so we can work together on it. Whether you’re a developer, a data scientist, or a journalist with ideas, you can contribute to this project.', 'raw': 'This is just the first version of the tool, and it’s pretty basic. I’ve uploaded it to the Journalists on Hugging Face community so we can work together on it. Whether you’re a developer, a data scientist, or a journalist with ideas, you can contribute to this project.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can also copy this app to your own account or organization to customize it to your needs.', 'raw': 'You can also copy this app to your own account or organization to customize it to your needs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 Test the scraper here: ', 'raw': '👉 Test the scraper here: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'JournalistsonHF/ai-scraper'}, 'url': 'https://huggingface.co/spaces/JournalistsonHF/ai-scraper', 'raw': 'https://huggingface.co/spaces/JournalistsonHF/ai-scraper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤝 Join the Journalists on 🤗 community: ', 'raw': '🤝 Join the Journalists on 🤗 community: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'JournalistsonHF'}, 'url': 'https://huggingface.co/JournalistsonHF', 'raw': 'https://huggingface.co/JournalistsonHF', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63691c3eda9b693c2730b2a2/WoOIHdJahrAnLo1wpyVjc.png'}, {'type': 'new_line', 'raw': '\n'}]","Excited to share a new project to make journalists’ lives easier when gathering information! + +Collecting data like lists, URLs, etc., from websites is not always easy (and sometimes painful). Web scraping requires technical skills that only a handful of people in each newsroom have. + +I recently stumbled upon @scrapegraphai, a scraper that does the heavy lifting with AI for the user with a simple prompt in natural language. I asked them if they could integrate the Hugging Face Hub to use open-source models and created a no-code, easy-to-use interface on Gradio. + +You can then save time and focus on storytelling! + +🔧 How It Works +1. Input Your Prompt and Source URL +2. Click ‘Scrape and Summarize’ +3. Receive Summarized Results + +👩‍💻 Get Involved! +This is just the first version of the tool, and it’s pretty basic. I’ve uploaded it to the Journalists on Hugging Face community so we can work together on it. Whether you’re a developer, a data scientist, or a journalist with ideas, you can contribute to this project. + +You can also copy this app to your own account or organization to customize it to your needs. 
+ +👉 Test the scraper here: https://huggingface.co/spaces/JournalistsonHF/ai-scraper + +🤝 Join the Journalists on 🤗 community: https://huggingface.co/JournalistsonHF +",[],[],"[{'reaction': '🔥', 'users': ['evijit', 'osanseviero', 'louisbrulenaudet', 'Emmanuellepicaud', 'British-Rat'], 'count': 5}]",2024-05-22 13:23:21,2024-05-22 13:23:21.636,[],/posts/fdaudens/622865326118065,1040,,0 +/avatars/11e3c9c66d28490d6d09925f9aa47cd1.svg,33.0,JunhaoZhuang,JunhaoZhuang,741206747196094,"[{'type': 'text', 'value': 'We open source a new image inpainting model PowerPaint v2-1: ', 'raw': 'We open source a new image inpainting model PowerPaint v2-1: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'JunhaoZhuang/PowerPaint-v2-1'}, 'url': 'https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1', 'raw': 'https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1'}]","We open source a new image inpainting model PowerPaint v2-1: +https://huggingface.co/JunhaoZhuang/PowerPaint-v2-1",[],[],"[{'reaction': '👍', 'users': ['kap9911', 'victor', 'jeanflop', 'JunhaoZhuang', 'Shinku', 'Norod78', 'hungdang1610', 'zengyh1900', 'alansugar'], 'count': 9}, {'reaction': '🔥', 'users': ['philmat1986', 'blanchon'], 'count': 2}]",2024-05-22 12:44:29,2024-05-22 14:23:00.945,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}]",/posts/JunhaoZhuang/741206747196094,3260,,1 +/avatars/efc6a9cb98a6b485f7bcb11e5b7b143f.svg,3.0,Grace Smith,BoredApeYachtClub,876437298354738,"[{'type': 'text', 'value': 'My Favourite songs ', 'raw': 'My Favourite songs '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/jJvDnYdD8JQ', 'raw': 'https://youtu.be/jJvDnYdD8JQ'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Katy perry roar, fireworks.', 'raw': 'Katy perry roar, fireworks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paula Abdul - Straight Up (Lyrics), Paula Abdul Rush Rush (Lyricshttps://www.youtube.com/watch?v=c_43Qf_d-CA, ', 'raw': 'Paula Abdul - Straight Up (Lyrics), Paula Abdul Rush Rush (Lyricshttps://www.youtube.com/watch?v=c_43Qf_d-CA, '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=eqSdQ5gJC7A', 'raw': 'https://www.youtube.com/watch?v=eqSdQ5gJC7A'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'BLACKPINK, Selena Gomez - Ice Cream (Lyrics) ', 'raw': 'BLACKPINK, Selena Gomez - Ice Cream (Lyrics) '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=pcrnh069iBI', 'raw': 'https://www.youtube.com/watch?v=pcrnh069iBI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lil Nas X - Old Town Road (Lyrics link not included)', 'raw': 'Lil Nas X - Old Town Road (Lyrics link not included)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(SUNO.AI IS A SOFTWARE WHERE YOU CAN CREATE YOUR ON MUSIC WITH VOCAL SINGERS WRITE A PROMPT OR ADD YOUR OWN LYRICS)', 'raw': '(SUNO.AI IS A SOFTWARE WHERE YOU CAN CREATE YOUR ON MUSIC WITH VOCAL SINGERS WRITE A PROMPT 
OR ADD YOUR OWN LYRICS)'}]","My Favourite songs + +https://youtu.be/jJvDnYdD8JQ +Katy perry roar, fireworks. + +Paula Abdul - Straight Up (Lyrics), Paula Abdul Rush Rush (Lyricshttps://www.youtube.com/watch?v=c_43Qf_d-CA, https://www.youtube.com/watch?v=eqSdQ5gJC7A + +BLACKPINK, Selena Gomez - Ice Cream (Lyrics) https://www.youtube.com/watch?v=pcrnh069iBI + +Lil Nas X - Old Town Road (Lyrics link not included) + +(SUNO.AI IS A SOFTWARE WHERE YOU CAN CREATE YOUR ON MUSIC WITH VOCAL SINGERS WRITE A PROMPT OR ADD YOUR OWN LYRICS)",[],[],"[{'reaction': '👍', 'users': ['kap9911', 'Schmitz005'], 'count': 2}]",2024-05-22 10:40:42,2024-05-22 11:02:38.850,[],/posts/BoredApeYachtClub/876437298354738,1219,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6281d941eeb15579946ca3ce/0CdrBop_kjRkOqxUTYFbf.jpeg,13.0,Hui Sun,CocoSun,715620889191225,"[{'type': 'text', 'value': 'Google LLM(Multimodal) Medical Foundation Model Summary', 'raw': 'Google LLM(Multimodal) Medical Foundation Model Summary'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1.Med-PaLM: Large language models encode clinical knowledge, ', 'raw': '1.Med-PaLM: Large language models encode clinical knowledge, '}, {'type': 'link', 'href': 'https://www.nature.com/articles/s41586-023-06291-2', 'raw': 'https://www.nature.com/articles/s41586-023-06291-2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2.Med-PaLM 2: Towards Expert-Level Medical Question Answering with Large Language Models, ', 'raw': '2.Med-PaLM 2: Towards Expert-Level Medical Question Answering with Large Language Models, '}, {'type': 'link', 'href': 'http://arxiv.org/abs/2305.09617', 'raw': 'http://arxiv.org/abs/2305.09617'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3.Med-PaLM M: Towards Generalist Biomedical AI, ', 'raw': '3.Med-PaLM M: Towards Generalist Biomedical AI, '}, {'type': 'link', 'href': 'http://arxiv.org/abs/2307.14334', 'raw': 'http://arxiv.org/abs/2307.14334'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4.Med-Gemini: Capabilities of Gemini Models in Medicine, ', 'raw': '4.Med-Gemini: Capabilities of Gemini Models in Medicine, '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2404.18416v2', 'raw': 'https://arxiv.org/abs/2404.18416v2'}, {'type': 'text', 'value': '; Advancing Multimodal Medical Capabilities of Gemini, ', 'raw': '; Advancing Multimodal Medical Capabilities of Gemini, '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2405.03162', 'raw': 'https://arxiv.org/abs/2405.03162'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Google LLM(Multimodal) Medical Foundation Model Summary + +1.Med-PaLM: Large language models encode clinical knowledge, https://www.nature.com/articles/s41586-023-06291-2 +2.Med-PaLM 2: Towards Expert-Level Medical Question Answering with Large Language Models, http://arxiv.org/abs/2305.09617 +3.Med-PaLM M: Towards Generalist Biomedical AI, http://arxiv.org/abs/2307.14334 +4.Med-Gemini: Capabilities of Gemini Models in Medicine, https://arxiv.org/abs/2404.18416v2; Advancing Multimodal Medical Capabilities of Gemini, https://arxiv.org/abs/2405.03162 + +",[],[],"[{'reaction': '🔥', 'users': ['osanseviero', 'Schmitz005', 'Taylor658', 'CocoSun'], 'count': 4}]",2024-05-22 02:51:39,2025-03-07 06:22:15.227,"[{'_id': '66794b93892c0a17160c9064', 'avatarUrl': '/avatars/dd56681a76f01e562506ec5943f2a621.svg', 'fullname': 'Francesc Roca', 'name': 'Franroca-32', 'type': 'user', 'isPro': False, 'isHf': False, 
'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '63a350bdb5fc9ab9f6413626', 'avatarUrl': '/avatars/d30141328f9d424378f007887debdec5.svg', 'fullname': 'Shahbaz Ahmad', 'name': 'shahbbzz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '6281d941eeb15579946ca3ce', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6281d941eeb15579946ca3ce/0CdrBop_kjRkOqxUTYFbf.jpeg', 'fullname': 'Hui Sun', 'name': 'CocoSun', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 13, 'isFollowing': False}]",/posts/CocoSun/715620889191225,3343,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,827063866130454,"[{'type': 'text', 'value': ""Journey With Me Into The Mind of Large Language Models: Interesting Findings in AnthropicAI's Scaling Monosemanticity paper."", 'raw': ""Journey With Me Into The Mind of Large Language Models: Interesting Findings in AnthropicAI's Scaling Monosemanticity paper.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""One of the many unknowns with LLMs is the why behind the responses they give - it's unclear why certain responses are chosen over others. Which shows how little we know of what's happening inside these models. "", 'raw': ""One of the many unknowns with LLMs is the why behind the responses they give - it's unclear why certain responses are chosen over others. Which shows how little we know of what's happening inside these models. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To have a deeper sense of this, they tried Sparse Dictionary Learning on a larger model (Claude 3 Sonnet) - wherein they match patterns of neuron activations (named Features) to human interpretable meanings.', 'raw': 'To have a deeper sense of this, they tried Sparse Dictionary Learning on a larger model (Claude 3 Sonnet) - wherein they match patterns of neuron activations (named Features) to human interpretable meanings.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Now Dictionary Learning is a traditional ml technique that identifies recurring patterns of neuron activations across various contexts. Meaning, any internal state of the model can be expressed as a combination of a few active features rather than numerous active neurons.', 'raw': 'Now Dictionary Learning is a traditional ml technique that identifies recurring patterns of neuron activations across various contexts. Meaning, any internal state of the model can be expressed as a combination of a few active features rather than numerous active neurons.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'They scaled up a more effective measure of dictionary learning using a Sparse Autoencoder (SAE). The SAE has an encoder that maps inputs to sparse high-dimensional features via linear transformation & ReLU, and a decoder that reconstructs inputs from those features.', 'raw': 'They scaled up a more effective measure of dictionary learning using a Sparse Autoencoder (SAE). 
The SAE has an encoder that maps inputs to sparse high-dimensional features via linear transformation & ReLU, and a decoder that reconstructs inputs from those features.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Three variants (of sizes: ~1M, ~4M & ~34M features) of the SAE were trained and across SAEs, <300 active features/token, >65% variance were explained. With dead features: ~2% for 1M, 35% for 4M, 65% for 34M SAE. Implying better training could reduce dead features.\u2028\u2028Experiments were conducted with these SAEs where they were applied to residual stream activations (RSAs) at the model\'s middle layer (why? 1. RSAs are smaller than MLP layers = low compute cost, 2. helps tackle ""cross-layer superposition"" issues - when features are spread across multiple layers instead of being isolated in specific layers, causing interpretation difficulties). These experiments revealed that Scaling Laws can help guide training of these SAEs.', 'raw': 'Three variants (of sizes: ~1M, ~4M & ~34M features) of the SAE were trained and across SAEs, <300 active features/token, >65% variance were explained. With dead features: ~2% for 1M, 35% for 4M, 65% for 34M SAE. Implying better training could reduce dead features.\u2028\u2028Experiments were conducted with these SAEs where they were applied to residual stream activations (RSAs) at the model\'s middle layer (why? 1. RSAs are smaller than MLP layers = low compute cost, 2. helps tackle ""cross-layer superposition"" issues - when features are spread across multiple layers instead of being isolated in specific layers, causing interpretation difficulties). These experiments revealed that Scaling Laws can help guide training of these SAEs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'My favorite of course is the Basic Code Features - where the model attributed meaning to different code syntax elements similar to syntax highlighting in text editors.', 'raw': 'My favorite of course is the Basic Code Features - where the model attributed meaning to different code syntax elements similar to syntax highlighting in text editors.'}, {'type': 'new_line', 'raw': '\n'}]","Journey With Me Into The Mind of Large Language Models: Interesting Findings in AnthropicAI's Scaling Monosemanticity paper. + +One of the many unknowns with LLMs is the why behind the responses they give - it's unclear why certain responses are chosen over others. Which shows how little we know of what's happening inside these models. + +To have a deeper sense of this, they tried Sparse Dictionary Learning on a larger model (Claude 3 Sonnet) - wherein they match patterns of neuron activations (named Features) to human interpretable meanings. + +Now Dictionary Learning is a traditional ml technique that identifies recurring patterns of neuron activations across various contexts. Meaning, any internal state of the model can be expressed as a combination of a few active features rather than numerous active neurons. + +They scaled up a more effective measure of dictionary learning using a Sparse Autoencoder (SAE). The SAE has an encoder that maps inputs to sparse high-dimensional features via linear transformation & ReLU, and a decoder that reconstructs inputs from those features. + +Three variants (of sizes: ~1M, ~4M & ~34M features) of the SAE were trained and across SAEs, <300 active features/token, >65% variance were explained. 
With dead features: ~2% for 1M, 35% for 4M, 65% for 34M SAE. Implying better training could reduce dead features.

Experiments were conducted with these SAEs where they were applied to residual stream activations (RSAs) at the model's middle layer (why? 1. RSAs are smaller than MLP layers = low compute cost, 2. helps tackle ""cross-layer superposition"" issues - when features are spread across multiple layers instead of being isolated in specific layers, causing interpretation difficulties). These experiments revealed that Scaling Laws can help guide training of these SAEs. + +My favorite of course is the Basic Code Features - where the model attributed meaning to different code syntax elements similar to syntax highlighting in text editors. +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/-F-v_cWYNBejd73Icz2jF.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/rUfVzGOiKkkTx8aQAolGC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/kbQGGard3alcYx70VYjN5.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/LqdPmiEiwvx_1D1qbEylv.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/v3n_GBbOgCovUFQ-ZaCCW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/sU7xPDfPSoq8x0JjhBNfk.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/0a0-VcyKtmmqzKbm1ZnuZ.png'}]",[],"[{'reaction': '👍', 'users': ['victor', 'osanseviero', 'Ramikan-BR', 'louisbrulenaudet'], 'count': 4}, {'reaction': '🔥', 'users': ['srisree', 'Ramikan-BR'], 'count': 2}, {'reaction': '🚀', 'users': ['Ramikan-BR'], 'count': 1}, {'reaction': '👀', 'users': ['Ramikan-BR'], 'count': 1}]",2024-05-22 02:48:28,2024-05-22 10:07:23.934,"[{'_id': '6438a9027de34e8ea7e4b257', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg', 'fullname': 'Jaward Sesay', 'name': 'Jaward', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 331, 'isFollowing': False}]",/posts/Jaward/827063866130454,1619,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,909060621015691,"[{'type': 'text', 'value': '80% of fact-checked misinformation claims involve media, with a rise in AI-generated content in 2023, according to a new study, “A Large-Scale Survey and Dataset of Media-Based Misinformation In-The-Wild.” Worth a read for journalists, especially fact-checkers.', 'raw': '80% of fact-checked misinformation claims involve media, with a rise in AI-generated content in 2023, according to a new study, “A Large-Scale Survey and Dataset of Media-Based Misinformation In-The-Wild.” Worth a read for journalists, especially fact-checkers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'TL;DR:', 'raw': 'TL;DR:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 📊 135,838 fact checks analyzed', 'raw': '• 📊 135,838 fact checks analyzed'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 📸 80% of these claims involve media', 'raw': '• 📸 80% of these claims involve media'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🎥 Videos became more common starting in 2022, now more than 60% 
of fact-checked claims that include media', 'raw': '• 🎥 Videos became more common starting in 2022, now more than 60% of fact-checked claims that include media'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🤖 AI-generated content was rare until Spring of 2023, and then dramatically increased', 'raw': '• 🤖 AI-generated content was rare until Spring of 2023, and then dramatically increased'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• 🖼️ Image manipulations don’t require complex operations. Most of the time it’s context manipulations', 'raw': '• 🖼️ Image manipulations don’t require complex operations. Most of the time it’s context manipulations'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Read the paper here: ', 'raw': '• Read the paper here: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.11697'}, 'url': 'https://huggingface.co/papers/2405.11697', 'raw': 'https://huggingface.co/papers/2405.11697', 'label': 'AMMeBa: A Large-Scale Survey and Dataset of Media-Based Misinformation\n In-The-Wild (2405.11697)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• Take a look at the dataset: ', 'raw': '• Take a look at the dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'academic-datasets/AMMeBa'}, 'url': 'https://huggingface.co/datasets/academic-datasets/AMMeBa', 'raw': 'https://huggingface.co/datasets/academic-datasets/AMMeBa'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks ', 'raw': 'Thanks '}, {'type': 'mention', 'user': 'davanstrien', 'raw': '@davanstrien'}, {'type': 'text', 'value': ' for spotting it!', 'raw': ' for spotting it!'}, {'type': 'new_line', 'raw': '\n'}]","80% of fact-checked misinformation claims involve media, with a rise in AI-generated content in 2023, according to a new study, “A Large-Scale Survey and Dataset of Media-Based Misinformation In-The-Wild.” Worth a read for journalists, especially fact-checkers. + +TL;DR: +• 📊 135,838 fact checks analyzed +• 📸 80% of these claims involve media +• 🎥 Videos became more common starting in 2022, now more than 60% of fact-checked claims that include media +• 🤖 AI-generated content was rare until Spring of 2023, and then dramatically increased +• 🖼️ Image manipulations don’t require complex operations. Most of the time it’s context manipulations + +• Read the paper here: https://huggingface.co/papers/2405.11697 +• Take a look at the dataset: https://huggingface.co/datasets/academic-datasets/AMMeBa + +Thanks @davanstrien for spotting it! 
+",[],"[{'_id': '60107b385ac3e86b3ea4fc34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg', 'fullname': 'Daniel van Strien', 'name': 'davanstrien', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 638}]","[{'reaction': '❤️', 'users': ['LeroyDyer', 'osanseviero', 'BoredApeYachtClub', 'Cuiunbo', 'victor', 'MaggiR'], 'count': 6}, {'reaction': '👀', 'users': ['yumemio', 'BoredApeYachtClub', 'bmorphism'], 'count': 3}]",2024-05-21 20:11:24,2024-05-21 20:11:24.883,[],/posts/fdaudens/909060621015691,1793,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61b9df9b22e5b0fdd501a113/i2yTGbK7pFnw9YLwZ7elp.jpeg,4.0,Akhil B,hakunamatata1997,827996970337614,"[{'type': 'text', 'value': 'Why salesforce removedSFR-Iterative-DPO-LLaMA-3-8B-R ? Any ideas?', 'raw': 'Why salesforce removedSFR-Iterative-DPO-LLaMA-3-8B-R ? Any ideas?'}]",Why salesforce removedSFR-Iterative-DPO-LLaMA-3-8B-R ? Any ideas?,[],[],"[{'reaction': '👀', 'users': ['victor', 'julien-c'], 'count': 2}]",2024-05-17 05:57:05,2024-05-21 18:08:11.365,"[{'_id': '61b9df9b22e5b0fdd501a113', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b9df9b22e5b0fdd501a113/i2yTGbK7pFnw9YLwZ7elp.jpeg', 'fullname': 'Akhil B', 'name': 'hakunamatata1997', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '65f5dc345f9b537bfb125988', 'avatarUrl': '/avatars/7fa9de162694d34a214ccd8ecb02fa0a.svg', 'fullname': 'Sergey Zubrilin', 'name': 'hiauiarau', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}, {'_id': '6612aedf09f16e7347dfa7e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg', 'fullname': 'Nishith Jain', 'name': 'KingNish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1222, 'isFollowing': False}, {'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}]",/posts/hakunamatata1997/827996970337614,1453,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/612ee6a7b960e78c6d2319d4/2Hu9BaAyXbyh1vt0v1Qui.jpeg,84.0,Qian Liu,SivilTaram,492055994557568,"[{'type': 'text', 'value': 'Introducing Sailor-14B Model and Sailor2 Project 🚢', 'raw': 'Introducing Sailor-14B Model and Sailor2 Project 🚢'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're thrilled to announce the release of the Sailor-14B models, including the Base and the Chat versions!"", 'raw': ""We're thrilled to announce the release of the Sailor-14B models, including the Base and the Chat versions!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅Built upon the 
Qwen1.5-14B model, the Base version follows a similar procedure as our Sailor-7B model.', 'raw': '✅Built upon the Qwen1.5-14B model, the Base version follows a similar procedure as our Sailor-7B model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅The Chat version is optimized using DPO on our in-house human preference dataset, yielding a better experience than our previous Chat models.', 'raw': '✅The Chat version is optimized using DPO on our in-house human preference dataset, yielding a better experience than our previous Chat models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏠Home: ', 'raw': '🏠Home: '}, {'type': 'link', 'href': 'https://sailorllm.github.io', 'raw': 'https://sailorllm.github.io'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗Model: ', 'raw': '🤗Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'sail/Sailor-14B-Chat'}, 'url': 'https://huggingface.co/sail/Sailor-14B-Chat', 'raw': 'https://huggingface.co/sail/Sailor-14B-Chat'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻Demo: ', 'raw': '💻Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'sail/Sailor-14B-Chat'}, 'url': 'https://huggingface.co/spaces/sail/Sailor-14B-Chat', 'raw': 'https://huggingface.co/spaces/sail/Sailor-14B-Chat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're also excited to introduce the Sailor2 project, ✨ an open collaboration opportunity for the entire community! ✨"", 'raw': ""We're also excited to introduce the Sailor2 project, ✨ an open collaboration opportunity for the entire community! ✨""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 The Sailor2 project aims to build a LLM with ~30B parameters, optimized for multiple South-East Asian languages, including Cebuano, Indonesian, Khmer, Lao, Minangkabau, Malay, Burmese, Sundanese, Javanese, Thai, and Vietnamese.', 'raw': '🌐 The Sailor2 project aims to build a LLM with ~30B parameters, optimized for multiple South-East Asian languages, including Cebuano, Indonesian, Khmer, Lao, Minangkabau, Malay, Burmese, Sundanese, Javanese, Thai, and Vietnamese.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎯The model will undergo continual pre-training from a base model proficient in both Chinese and English using nearly 800B SEA tokens, with an expected performance comparable to the most advanced business models for the above SEA languages.', 'raw': '🎯The model will undergo continual pre-training from a base model proficient in both Chinese and English using nearly 800B SEA tokens, with an expected performance comparable to the most advanced business models for the above SEA languages.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤝 Contribute your data, expertise, and ideas to shape the future of open-source LLMs for the SEA region.', 'raw': '🤝 Contribute your data, expertise, and ideas to shape the future of open-source LLMs for the SEA region.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌍 Everyone passionate about the SEA region is welcome aboard! Join the party and get involved by scanning the QR code! 
🔍', 'raw': '🌍 Everyone passionate about the SEA region is welcome aboard! Join the party and get involved by scanning the QR code! 🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Let's sail together and enjoy the journey!⚓"", 'raw': ""Let's sail together and enjoy the journey!⚓""}, {'type': 'new_line', 'raw': '\n'}]","Introducing Sailor-14B Model and Sailor2 Project 🚢 + +We're thrilled to announce the release of the Sailor-14B models, including the Base and the Chat versions! + +✅Built upon the Qwen1.5-14B model, the Base version follows a similar procedure as our Sailor-7B model. +✅The Chat version is optimized using DPO on our in-house human preference dataset, yielding a better experience than our previous Chat models. + +🏠Home: https://sailorllm.github.io +🤗Model: https://huggingface.co/sail/Sailor-14B-Chat +💻Demo: https://huggingface.co/spaces/sail/Sailor-14B-Chat + +We're also excited to introduce the Sailor2 project, ✨ an open collaboration opportunity for the entire community! ✨ + +🌐 The Sailor2 project aims to build a LLM with ~30B parameters, optimized for multiple South-East Asian languages, including Cebuano, Indonesian, Khmer, Lao, Minangkabau, Malay, Burmese, Sundanese, Javanese, Thai, and Vietnamese. + +🎯The model will undergo continual pre-training from a base model proficient in both Chinese and English using nearly 800B SEA tokens, with an expected performance comparable to the most advanced business models for the above SEA languages. + +🤝 Contribute your data, expertise, and ideas to shape the future of open-source LLMs for the SEA region. + +🌍 Everyone passionate about the SEA region is welcome aboard! Join the party and get involved by scanning the QR code! 
🔍 + +Let's sail together and enjoy the journey!⚓ +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/612ee6a7b960e78c6d2319d4/KQnXvm2pnyvK6TBKyTYcR.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['osanseviero', 'victor', 'dreamerdeo', 'AlexLaw830', 'nbroad', 'suthanhcong', 'real-jiakai'], 'count': 7}]",2024-05-17 00:35:37,2024-05-17 08:46:54.654,"[{'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '612ee6a7b960e78c6d2319d4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/612ee6a7b960e78c6d2319d4/2Hu9BaAyXbyh1vt0v1Qui.jpeg', 'fullname': 'Qian Liu', 'name': 'SivilTaram', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 84, 'isFollowing': False}]",/posts/SivilTaram/492055994557568,2444,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,774296537807613,"[{'type': 'text', 'value': 'Stable Cascade Full Tutorial for Windows, Massed Compute, RunPod & Kaggle — Predecessor of SD3 — 1-Click Install Amazing Gradio APP', 'raw': 'Stable Cascade Full Tutorial for Windows, Massed Compute, RunPod & Kaggle — Predecessor of SD3 — 1-Click Install Amazing Gradio APP'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Stable Cascade is another amazing model for Stability AI', 'raw': 'Stable Cascade is another amazing model for Stability AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Weights are published', 'raw': 'Weights are published'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Stable Cascade Full Tutorial for Windows — Predecessor of SD3–1-Click Install Amazing Gradio APP : ', 'raw': 'Stable Cascade Full Tutorial for Windows — Predecessor of SD3–1-Click Install Amazing Gradio APP : '}, {'type': 'link', 'href': 'https://youtu.be/q0cYhalUUsc', 'raw': 'https://youtu.be/q0cYhalUUsc'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Stable Cascade Full Tutorial for Cloud — Predecessor of SD3 — Massed Compute, RunPod & Kaggle : ', 'raw': 'Stable Cascade Full Tutorial for Cloud — Predecessor of SD3 — Massed Compute, RunPod & Kaggle : '}, {'type': 'link', 'href': 'https://youtu.be/PKDeMdEObNo', 'raw': 'https://youtu.be/PKDeMdEObNo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Stable Cascade Full Tutorial for Windows, Massed Compute, RunPod & Kaggle — Predecessor of SD3 — 1-Click Install Amazing Gradio APP + +Stable Cascade is another amazing model for Stability AI + +Weights are published + +Stable Cascade Full Tutorial for Windows — Predecessor of SD3–1-Click Install Amazing Gradio APP : https://youtu.be/q0cYhalUUsc + +Stable Cascade Full Tutorial for Cloud — Predecessor of SD3 — Massed Compute, RunPod & Kaggle : https://youtu.be/PKDeMdEObNo + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/p6TXbj97OT5XTdRQ-WKVL.png'}, {'type': 'image', 'url': 
'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/rwvXv4mJxPCeFdQIVMCHI.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/HnBLcmcfas3LjQwv6cOYC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/zqq5iHZdQaDFoJ-he9vKm.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Q1jEvJ4vDi07aAiq6AXqz.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/cmENpOJ9gsgnXYmYBjndl.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Bwxdl7rzEJ6ewVloycIKO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/EvTIfYDq9MNBbTi_aeQBA.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/66FATq0xVlVRDRYGiQPhb.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/CWls9jEm6J4TldKIg_PGB.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/AZTHVYNP6NRE5oCNdyK3D.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/X8QexcF518Ge79zxtUjIp.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/LXRoTrszMC1cdU8vUOWo8.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/yusJmOy9bIKPLAz3t6YoE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/1bZ5Y_kd1hPAwZHqK0X08.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Ul6YUbK4yKKIN_dF4TCHw.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/mP6tk60Ob_sEYVqmmpFOx.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/3a3ZZW41CVMrfX1-waC3C.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/bTKpzGSyVtuucHpfYyeGc.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/b6_J70RoCxQxsNUWfhXy-.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/u9AxMuI-Fz1khqkcELccv.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Mk-OFOyrtAkeCqEN8XvIx.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/BeIwhUqmZdq4BAjAGHj9Y.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/nrZOrlr6cpzTAEaClPgIl.png'}]",[],"[{'reaction': '🔥', 'users': ['beno-rohman', 'zuppif', 'victor', 'Ramikan-BR'], 'count': 4}]",2024-05-17 00:30:46,2024-05-17 00:30:46.610,[],/posts/MonsterMMORPG/774296537807613,1548,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,739383992641014,"[{'type': 'text', 'value': '🎉 A new LLM is launched! 🚀 ', 'raw': '🎉 A new LLM is launched! 
🚀 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""After checking if it's open-source or not, 🤔 "", 'raw': ""After checking if it's open-source or not, 🤔 ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'you rush to see the benchmarks... 🏃\u200d♂️💨', 'raw': 'you rush to see the benchmarks... 🏃\u200d♂️💨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Which benchmark does everyone check first? 🔍', 'raw': 'Which benchmark does everyone check first? 🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MMLU (Massive Multitask Language Understanding)? 📚', 'raw': 'MMLU (Massive Multitask Language Understanding)? 📚'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Benchmarks like MMLU reaching saturation... most of the time the performance does not translate to real-world use cases! 🌐❗', 'raw': 'Benchmarks like MMLU reaching saturation... most of the time the performance does not translate to real-world use cases! 🌐❗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Meet MMLU-Pro, released by TIGER-Lab on ', 'raw': 'Meet MMLU-Pro, released by TIGER-Lab on '}, {'type': 'mention', 'user': 'huggingface', 'raw': '@huggingface'}, {'type': 'text', 'value': ' ! 🐯🌍', 'raw': ' ! 🐯🌍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧪 12,217 questions across biology, business, chemistry, computer science, economics, engineering, health, history, law, mathematics, philosophy, physics, and psychology carefully validated by humans 🧑\u200d🔬', 'raw': '🧪 12,217 questions across biology, business, chemistry, computer science, economics, engineering, health, history, law, mathematics, philosophy, physics, and psychology carefully validated by humans 🧑\u200d🔬'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔟 Goes to 10 options per question instead of 4, this increase in options will make the evaluation more realistic and reduce random guessing 🎯', 'raw': '🔟 Goes to 10 options per question instead of 4, this increase in options will make the evaluation more realistic and reduce random guessing 🎯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 56% of questions come from MMLU, 34% from STEM websites, and the rest from TheoremQA and SciBench 📈', 'raw': '📊 56% of questions come from MMLU, 34% from STEM websites, and the rest from TheoremQA and SciBench 📈'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 LLMs with weak chain-of-thought reasoning tend to perform lower, indicating it is more challenging and representative of real-world expectations 🧠💡', 'raw': '🤖 LLMs with weak chain-of-thought reasoning tend to perform lower, indicating it is more challenging and representative of real-world expectations 🧠💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Any guess who tops it and who bombs it? 🤔📉📈', 'raw': 'Any guess who tops it and who bombs it? 
🤔📉📈'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GPT-4o drops by 17% (from 0.887 to 0.7149) 📉', 'raw': 'GPT-4o drops by 17% (from 0.887 to 0.7149) 📉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Llama-3-70B drops by 27% (from 0.820 to 0.5541) 📉', 'raw': 'Llama-3-70B drops by 27% (from 0.820 to 0.5541) 📉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 ', 'raw': '🔗 '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'TIGER-Lab/MMLU-Pro'}, 'url': 'https://huggingface.co/datasets/TIGER-Lab/MMLU-Pro', 'raw': 'https://huggingface.co/datasets/TIGER-Lab/MMLU-Pro'}]","🎉 A new LLM is launched! 🚀 +After checking if it's open-source or not, 🤔 +you rush to see the benchmarks... 🏃‍♂️💨 + +Which benchmark does everyone check first? 🔍 + +MMLU (Massive Multitask Language Understanding)? 📚 + +Benchmarks like MMLU reaching saturation... most of the time the performance does not translate to real-world use cases! 🌐❗ + +Meet MMLU-Pro, released by TIGER-Lab on @huggingface ! 🐯🌍 + +🧪 12,217 questions across biology, business, chemistry, computer science, economics, engineering, health, history, law, mathematics, philosophy, physics, and psychology carefully validated by humans 🧑‍🔬 + +🔟 Goes to 10 options per question instead of 4, this increase in options will make the evaluation more realistic and reduce random guessing 🎯 + +📊 56% of questions come from MMLU, 34% from STEM websites, and the rest from TheoremQA and SciBench 📈 + +🤖 LLMs with weak chain-of-thought reasoning tend to perform lower, indicating it is more challenging and representative of real-world expectations 🧠💡 + +Any guess who tops it and who bombs it? 
🤔📉📈 + +GPT-4o drops by 17% (from 0.887 to 0.7149) 📉 +Llama-3-70B drops by 27% (from 0.820 to 0.5541) 📉 + +🔗 https://huggingface.co/datasets/TIGER-Lab/MMLU-Pro","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/Rd16K3LaKXUw2naa7FvPv.png'}]",[],"[{'reaction': '🔥', 'users': ['clem', 'zaursamedov1', 'osanseviero'], 'count': 3}, {'reaction': '❤️', 'users': ['clem', 'zaursamedov1', 'clefourrier'], 'count': 3}]",2024-05-16 22:08:59,2024-05-16 22:35:32.299,"[{'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}, {'_id': '662c66989e6d371ab7b7f414', 'avatarUrl': '/avatars/e4fa0d942a2e23bffb365601a34c4c9d.svg', 'fullname': 'Z', 'name': 'Zachyyypoo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/singhsidhukuldeep/739383992641014,1337,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/63a7422854f1d0225b075bfc/XGYAcDPZG5ZEsNBWG6guw.jpeg,75.0,lhl,leonardlin,182188183152735,"[{'type': 'text', 'value': ""I've been doing some evals and tuning, and this chat template repo maintained by "", 'raw': ""I've been doing some evals and tuning, and this chat template repo maintained by ""}, {'type': 'mention', 'user': 'chujiezheng', 'raw': '@chujiezheng'}, {'type': 'text', 'value': ' is great: ', 'raw': ' is great: '}, {'type': 'link', 'href': 'https://github.com/chujiezheng/chat_templates', 'raw': 'https://github.com/chujiezheng/chat_templates'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's also a simple script for checking what the output looks like:"", 'raw': ""Here's also a simple script for checking what the output looks like:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'from transformers import AutoTokenizer\n\ntokenizer = AutoTokenizer.from_pretrained(""augmxnt/shisa-7b-v1"")\nmessages = [\n {\'role\': \'user\', \'content\': \'This is the first user input.\'},\n {\'role\': \'assistant\', \'content\': \'This is the first assistant response.\'},\n {\'role\': \'user\', \'content\': \'This is the second user input.\'},\n]\n\nprint()\nprint(\'Chat Template:\')\nprint(tokenizer.chat_template)\nprint()\nprint(\'---\')\nprint()\n\nprint(tokenizer.apply_chat_template(messages, tokenize=False))', 'raw': '```\nfrom transformers import AutoTokenizer\n\ntokenizer = AutoTokenizer.from_pretrained(""augmxnt/shisa-7b-v1"")\nmessages = [\n {\'role\': \'user\', \'content\': \'This is the first user input.\'},\n {\'role\': \'assistant\', \'content\': \'This is the first assistant response.\'},\n {\'role\': \'user\', \'content\': \'This is the second user input.\'},\n]\n\nprint()\nprint(\'Chat Template:\')\nprint(tokenizer.chat_template)\nprint()\nprint(\'---\')\nprint()\n\nprint(tokenizer.apply_chat_template(messages, tokenize=False))\n```'}]","I've been doing some evals and tuning, and this chat template repo maintained by @chujiezheng is great: https://github.com/chujiezheng/chat_templates + +Here's also a simple script for checking what the output looks like: +``` +from transformers import AutoTokenizer + +tokenizer = AutoTokenizer.from_pretrained(""augmxnt/shisa-7b-v1"") +messages = [ + {'role': 'user', 'content': 'This is the first 
user input.'}, + {'role': 'assistant', 'content': 'This is the first assistant response.'}, + {'role': 'user', 'content': 'This is the second user input.'}, +] + +print() +print('Chat Template:') +print(tokenizer.chat_template) +print() +print('---') +print() + +print(tokenizer.apply_chat_template(messages, tokenize=False)) +```",[],"[{'_id': '610b70452719facd4ea85e28', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/610b70452719facd4ea85e28/S7nMy7D0Rxq0VIVblhYDG.jpeg', 'fullname': 'Chujie Zheng', 'name': 'chujiezheng', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 41}]","[{'reaction': '❤️', 'users': ['chujiezheng', 'osanseviero', 'not-lain'], 'count': 3}]",2024-05-16 21:14:25,2024-05-16 21:14:25.515,[],/posts/leonardlin/182188183152735,1262,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/HOCJv7WE2C4eqqqppDFpA.png,1148.0,Lewdiculous (AetherArchitectural),Lewdiculous,958510375628116,"[{'type': 'text', 'value': 'More context for your Pascal GPU or older!', 'raw': 'More context for your Pascal GPU or older!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Update: Now available in the official releases of KoboldCpp!', 'raw': 'Update: Now available in the official releases of KoboldCpp!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[releases] ', 'raw': '[releases] '}, {'type': 'link', 'href': 'https://github.com/LostRuins/koboldcpp/releases/latest', 'raw': 'https://github.com/LostRuins/koboldcpp/releases/latest'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'These are great news for all the users with GTX 10XX, P40...', 'raw': 'These are great news for all the users with GTX 10XX, P40...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Flash Attention implementation for older NVIDIA GPUs without requiring Tensor Cores has come to llama.cpp in the last few days, and should be merged in the next version of KoboldCpp, you can already try it with another fork or by building it.', 'raw': 'Flash Attention implementation for older NVIDIA GPUs without requiring Tensor Cores has come to llama.cpp in the last few days, and should be merged in the next version of KoboldCpp, you can already try it with another fork or by building it.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[Mentioned KCPP fork] ', 'raw': '[Mentioned KCPP fork] '}, {'type': 'link', 'href': 'https://github.com/Nexesenex/kobold.cpp/releases/latest', 'raw': 'https://github.com/Nexesenex/kobold.cpp/releases/latest'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '[PR] ', 'raw': '[PR] '}, {'type': 'link', 'href': 'https://github.com/ggerganov/llama.cpp/pull/7188', 'raw': 'https://github.com/ggerganov/llama.cpp/pull/7188'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You should expect less VRAM usage for the same context, allowing you to experience higher contexts with your current GPU.', 'raw': 'You should expect less VRAM usage for the same context, allowing you to experience higher contexts with your current GPU.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""There have also been reported final tokens/second speed improvements for inference, so that's also grand!"", 
'raw': ""There have also been reported final tokens/second speed improvements for inference, so that's also grand!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you have tried it, I'd like to hear your experiences with --flashattention so far, especially for this implementation and for the large number of Pascal (GTX 10XX, P40...) cards."", 'raw': ""If you have tried it, I'd like to hear your experiences with --flashattention so far, especially for this implementation and for the large number of Pascal (GTX 10XX, P40...) cards.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Discussion linked bellow, with more links to relevant information:', 'raw': 'Discussion linked bellow, with more links to relevant information:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/LWDCLS/LLM-Discussions/discussions/11', 'raw': 'https://huggingface.co/LWDCLS/LLM-Discussions/discussions/11'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cheers!', 'raw': 'Cheers!'}]","More context for your Pascal GPU or older! + +Update: Now available in the official releases of KoboldCpp! +[releases] https://github.com/LostRuins/koboldcpp/releases/latest + +These are great news for all the users with GTX 10XX, P40... + +Flash Attention implementation for older NVIDIA GPUs without requiring Tensor Cores has come to llama.cpp in the last few days, and should be merged in the next version of KoboldCpp, you can already try it with another fork or by building it. + +[Mentioned KCPP fork] https://github.com/Nexesenex/kobold.cpp/releases/latest + +[PR] https://github.com/ggerganov/llama.cpp/pull/7188 + +You should expect less VRAM usage for the same context, allowing you to experience higher contexts with your current GPU. + +There have also been reported final tokens/second speed improvements for inference, so that's also grand! + +If you have tried it, I'd like to hear your experiences with --flashattention so far, especially for this implementation and for the large number of Pascal (GTX 10XX, P40...) cards. 
+ +Discussion linked below, with more links to relevant information: + +https://huggingface.co/LWDCLS/LLM-Discussions/discussions/11 + +Cheers!",[],[],"[{'reaction': '🚀', 'users': ['Lewdiculous', 'gate369', 'odyss3y', 'fluffyone', 'Ramikan-BR', 'Bjorno'], 'count': 6}, {'reaction': '👍', 'users': ['malificar', 'xpgx1', 'Ramikan-BR'], 'count': 3}, {'reaction': '❤️', 'users': ['fluffyone', 'Ramikan-BR', 'fjall'], 'count': 3}, {'reaction': '🔥', 'users': ['AlekseiPravdin', 'Ramikan-BR', 'svenard'], 'count': 3}, {'reaction': '👀', 'users': ['Ramikan-BR', 'den0620'], 'count': 2}, {'reaction': '➕', 'users': ['Ardvark123'], 'count': 1}]",2024-05-16 18:49:08,2024-05-24 11:59:20.416,"[{'_id': '65f115216c162dbd552fb41d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65f115216c162dbd552fb41d/-hfb5OFTL5Ex4lJziSgxg.png', 'fullname': 'Ardvark-san', 'name': 'Ardvark123', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65d4cf2693a0a3744a27536c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/HOCJv7WE2C4eqqqppDFpA.png', 'fullname': 'Lewdiculous (AetherArchitectural)', 'name': 'Lewdiculous', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1148, 'isFollowing': False}, {'_id': '6335a8ea169f33bf8bf128c7', 'avatarUrl': '/avatars/ac0c386e729cccdc8290cb5d28ddc4a4.svg', 'fullname': 'Virt', 'name': 'Virt-io', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 119, 'isFollowing': False}]",/posts/Lewdiculous/958510375628116,56023,,24 +https://cdn-avatars.huggingface.co/v1/production/uploads/6515cfef26224cf8b1eb8e2f/S063NW-KtzuJw5vdibuqR.png,25.0,Vasiliy,Kvikontent,157604583355267,"[{'type': 'text', 'value': ""Just bought PRO but I don't understand how to use any image generation models in spaces because I don't know how to decode or process images after generation. Help!""}]",Just bought PRO but I don't understand how to use any image generation models in spaces because I don't know how to decode or process images after generation. Help!,[],[],[],2024-05-16 18:01:28,2024-05-17 12:20:31.899,"[{'_id': '66454996ff120b25ff92eefc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/BtQAz3wZXjEEI8Yi2dIqV.jpeg', 'fullname': 'Carol Wild', 'name': 'FireHug', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6515cfef26224cf8b1eb8e2f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6515cfef26224cf8b1eb8e2f/S063NW-KtzuJw5vdibuqR.png', 'fullname': 'Vasiliy', 'name': 'Kvikontent', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 25, 'isFollowing': False}]",/posts/Kvikontent/157604583355267,2365,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,421684109174186,"[{'type': 'text', 'value': 'Access to computational resources is key for democratizing AI, in all domains. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We cooked up something we're proud of: Hugging Face is committing $10 million in free GPUs to help developers create new AI technologies."", 'raw': ""We cooked up something we're proud of: Hugging Face is committing $10 million in free GPUs to help developers create new AI technologies.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '“AI should not be held in the hands of the few. With this commitment to open-source developers, we’re excited to see what everyone will cook up next in the spirit of collaboration and transparency.” — ', 'raw': '“AI should not be held in the hands of the few. With this commitment to open-source developers, we’re excited to see what everyone will cook up next in the spirit of collaboration and transparency.” — '}, {'type': 'mention', 'user': 'clem', 'raw': '@clem'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read the exclusive by Kylie Robison: ', 'raw': 'Read the exclusive by Kylie Robison: '}, {'type': 'link', 'href': 'https://www.theverge.com/2024/5/16/24156755/hugging-face-celement-delangue-free-shared-gpus-ai', 'raw': 'https://www.theverge.com/2024/5/16/24156755/hugging-face-celement-delangue-free-shared-gpus-ai'}]","Access to computational resources is key for democratizing AI, in all domains. + +We cooked up something we're proud of: Hugging Face is committing $10 million in free GPUs to help developers create new AI technologies. + +“AI should not be held in the hands of the few. With this commitment to open-source developers, we’re excited to see what everyone will cook up next in the spirit of collaboration and transparency.” — @clem + +Read the exclusive by Kylie Robison: https://www.theverge.com/2024/5/16/24156755/hugging-face-celement-delangue-free-shared-gpus-ai",[],"[{'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489}]","[{'reaction': '❤️', 'users': ['clem', 'zaursamedov1', 'osanseviero', 'mexicanamerican', 'sa8', 'MexIvanov', 'KingNish', 'not-lain'], 'count': 8}, {'reaction': '👍', 'users': ['Winnougan'], 'count': 1}]",2024-05-16 17:39:45,2024-05-16 17:39:45.212,[],/posts/fdaudens/421684109174186,1317,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/638dd709ca00481637c050d1/Kk00-u2rI6P3mEKaiA_N0.jpeg,7.0,Grantley Cullar,gospacedev,390953249574526,"[{'type': 'text', 'value': 'Thank you ', 'raw': 'Thank you '}, {'type': 'mention', 'user': 'Niansuh', 'raw': '@Niansuh'}, {'type': 'text', 'value': ' for starting a space and sharing your AI assistant application in Narra AI!', 'raw': ' for starting a space and sharing your AI assistant application in Narra AI!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'narra-ai/ChatGPT'}, 'url': 'https://huggingface.co/spaces/narra-ai/ChatGPT', 'raw': 'https://huggingface.co/spaces/narra-ai/ChatGPT'}]","Thank you @Niansuh for starting a space and sharing your AI assistant application in Narra AI! 
+ +https://huggingface.co/spaces/narra-ai/ChatGPT","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/638dd709ca00481637c050d1/JIQFyB7Za3MIIWh6RJHr4.png'}]","[{'_id': '64cba00d710645aa7b04f281', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64cba00d710645aa7b04f281/a_-LPwd4wqRyi8sJ1QxjI.jpeg', 'fullname': 'Husnain', 'name': 'Niansuh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 75}]","[{'reaction': '❤️', 'users': ['Niansuh'], 'count': 1}]",2024-05-16 16:26:29,2024-05-30 13:40:54.316,"[{'_id': '64cba00d710645aa7b04f281', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64cba00d710645aa7b04f281/a_-LPwd4wqRyi8sJ1QxjI.jpeg', 'fullname': 'Husnain', 'name': 'Niansuh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 75, 'isFollowing': False}]",/posts/gospacedev/390953249574526,1313,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/61d6f47bd49065ee28a1ee7d/RCcQBAABHU9fVVqzoDqTA.jpeg,3.0,Frederic Branchaud-Charron,Dref360,480891530274405,"[{'type': 'text', 'value': 'Baal, our Bayesian Active Learning library is working on a major version and we want to know more about you! ', 'raw': 'Baal, our Bayesian Active Learning library is working on a major version and we want to know more about you! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you use Baal for Active Learning, Uncertainty Estimation or Bayesian Deep Learning, we would **love** to talk to you! 😎 ', 'raw': 'If you use Baal for Active Learning, Uncertainty Estimation or Bayesian Deep Learning, we would **love** to talk to you! 😎 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In more detail, we want to understand when our users use our library and how.', 'raw': 'In more detail, we want to understand when our users use our library and how.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can take a spot in our Calendly: ', 'raw': 'You can take a spot in our Calendly: '}, {'type': 'link', 'href': 'https://calendly.com/baal-org/30min?month=2024-05', 'raw': 'https://calendly.com/baal-org/30min?month=2024-05'}]","Baal, our Bayesian Active Learning library is working on a major version and we want to know more about you! + +If you use Baal for Active Learning, Uncertainty Estimation or Bayesian Deep Learning, we would **love** to talk to you! 😎 + +In more detail, we want to understand when our users use our library and how. + +You can take a spot in our Calendly: https://calendly.com/baal-org/30min?month=2024-05",[],[],"[{'reaction': '🚀', 'users': ['Dref360', 'elonmuskceo', 'mattmdjaga'], 'count': 3}]",2024-05-16 15:41:01,2024-05-16 15:41:01.046,[],/posts/Dref360/480891530274405,1104,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,933621658616215,"[{'type': 'text', 'value': 'In my ongoing quest to learn more about building synthetic datasets, I\'ve created an ""Awesome Synthetic Datasets"" list. ', 'raw': 'In my ongoing quest to learn more about building synthetic datasets, I\'ve created an ""Awesome Synthetic Datasets"" list. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The aim is to lightly curate a collection of resources, tutorials, and tools for generating synthetic datasets using large language models. ', 'raw': 'The aim is to lightly curate a collection of resources, tutorials, and tools for generating synthetic datasets using large language models. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I plan to add some ""key techniques"" to the repo, but for now, it focuses on important datasets, papers, and tools. ', 'raw': 'I plan to add some ""key techniques"" to the repo, but for now, it focuses on important datasets, papers, and tools. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 ', 'raw': '🔗 '}, {'type': 'link', 'href': 'https://github.com/davanstrien/awesome-synthetic-datasets', 'raw': 'https://github.com/davanstrien/awesome-synthetic-datasets'}]","In my ongoing quest to learn more about building synthetic datasets, I've created an ""Awesome Synthetic Datasets"" list. + +The aim is to lightly curate a collection of resources, tutorials, and tools for generating synthetic datasets using large language models. + +I plan to add some ""key techniques"" to the repo, but for now, it focuses on important datasets, papers, and tools. + +🔗 https://github.com/davanstrien/awesome-synthetic-datasets",[],[],"[{'reaction': '🔥', 'users': ['asoria', 'CTRLMerz'], 'count': 2}, {'reaction': '👍', 'users': ['eliolio'], 'count': 1}]",2024-05-14 14:13:58,2024-05-14 14:13:58.389,[],/posts/davanstrien/933621658616215,953,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,182984250825015,"[{'type': 'text', 'value': '🚀👕🌟 New Research Alert - SIGGRAPH 2024 (Avatars Collection)! 🌟👚🚀', 'raw': '🚀👕🌟 New Research Alert - SIGGRAPH 2024 (Avatars Collection)! 
🌟👚🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: LayGA: Layered Gaussian Avatars for Animatable Clothing Transfer 🔝', 'raw': '📄 Title: LayGA: Layered Gaussian Avatars for Animatable Clothing Transfer 🔝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: LayGA is a novel method for animatable clothing transfer that separates the body and clothing into two layers for improved photorealism and accurate clothing tracking, outperforming existing methods.', 'raw': '📝 Description: LayGA is a novel method for animatable clothing transfer that separates the body and clothing into two layers for improved photorealism and accurate clothing tracking, outperforming existing methods.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Siyou Lin, Zhe Li, Zhaoqi Su, Zerong Zheng, Hongwen Zhang, and Yebin Liu', 'raw': '👥 Authors: Siyou Lin, Zhe Li, Zhaoqi Su, Zerong Zheng, Hongwen Zhang, and Yebin Liu'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: SIGGRAPH, 28 Jul – 1 Aug, 2024 | Denver CO, USA 🇺🇸', 'raw': '📅 Conference: SIGGRAPH, 28 Jul – 1 Aug, 2024 | Denver CO, USA 🇺🇸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.07319'}, 'url': 'https://huggingface.co/papers/2405.07319', 'raw': 'https://huggingface.co/papers/2405.07319', 'label': 'LayGA: Layered Gaussian Avatars for Animatable Clothing Transfer (2405.07319)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Page: ', 'raw': '🌐 Github Page: '}, {'type': 'link', 'href': 'https://jsnln.github.io/layga/index.html', 'raw': 'https://jsnln.github.io/layga/index.html'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #LayGA #AnimatableClothingTransfer #VirtualTryOn #AvatarTechnology #SIGGRAPH2024 #ComputerGraphics #DeepLearning #ComputerVision #Innovation', 'raw': '🔍 Keywords: #LayGA #AnimatableClothingTransfer #VirtualTryOn #AvatarTechnology #SIGGRAPH2024 #ComputerGraphics #DeepLearning 
#ComputerVision #Innovation'}]","🚀👕🌟 New Research Alert - SIGGRAPH 2024 (Avatars Collection)! 🌟👚🚀 +📄 Title: LayGA: Layered Gaussian Avatars for Animatable Clothing Transfer 🔝 + +📝 Description: LayGA is a novel method for animatable clothing transfer that separates the body and clothing into two layers for improved photorealism and accurate clothing tracking, outperforming existing methods. + +👥 Authors: Siyou Lin, Zhe Li, Zhaoqi Su, Zerong Zheng, Hongwen Zhang, and Yebin Liu + +📅 Conference: SIGGRAPH, 28 Jul – 1 Aug, 2024 | Denver CO, USA 🇺🇸 + +📄 Paper: https://huggingface.co/papers/2405.07319 + +🌐 Github Page: https://jsnln.github.io/layga/index.html + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +🔍 Keywords: #LayGA #AnimatableClothingTransfer #VirtualTryOn #AvatarTechnology #SIGGRAPH2024 #ComputerGraphics #DeepLearning #ComputerVision #Innovation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/eXPh-ANxcjpS17IpWB2UD.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/PkImD8pxyTqp2F1Oc9tFn.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/02paJFhQf9G7TPAkl85JP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Ht4HNmKhe4OeVFzLO_KY3.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/i0FPzgOEvAoy_Y9bals4r.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}]","[{'reaction': '🔥', 'users': ['DmitryRyumin', 'abdulmoeedirshad'], 'count': 2}]",2024-05-14 12:52:44,2024-05-14 12:54:24.025,[],/posts/DmitryRyumin/182984250825015,1071,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5fa19f4ba13e063b8b2b5e11/nGVHdTYX2udnt-K8mqY27.jpeg,1494.0,Abhishek Thakur,abhishek,196339313433568,"[{'type': 'text', 'value': '🚨 NEW TASK ALERT 🚨', 'raw': '🚨 NEW TASK ALERT 🚨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎉 AutoTrain now supports Object Detection! 🎉', 'raw': '🎉 AutoTrain now supports Object Detection! 
🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Transform your projects with these powerful new features:', 'raw': 'Transform your projects with these powerful new features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Fine-tune any supported model from the Hugging Face Hub', 'raw': '🔹 Fine-tune any supported model from the Hugging Face Hub'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Seamless logging with TensorBoard or W&B', 'raw': '🔹 Seamless logging with TensorBoard or W&B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Support for local and hub datasets', 'raw': '🔹 Support for local and hub datasets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Configurable training for tailored results', 'raw': '🔹 Configurable training for tailored results'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Train locally or leverage Hugging Face Spaces', 'raw': '🔹 Train locally or leverage Hugging Face Spaces'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔹 Deployment-ready with API inference or Hugging Face endpoints', 'raw': '🔹 Deployment-ready with API inference or Hugging Face endpoints'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AutoTrain: ', 'raw': 'AutoTrain: '}, {'type': 'link', 'href': 'https://hf.co/autotrain', 'raw': 'https://hf.co/autotrain'}]","🚨 NEW TASK ALERT 🚨 +🎉 AutoTrain now supports Object Detection! 🎉 +Transform your projects with these powerful new features: +🔹 Fine-tune any supported model from the Hugging Face Hub +🔹 Seamless logging with TensorBoard or W&B +🔹 Support for local and hub datasets +🔹 Configurable training for tailored results +🔹 Train locally or leverage Hugging Face Spaces +🔹 Deployment-ready with API inference or Hugging Face endpoints +AutoTrain: https://hf.co/autotrain",[],[],"[{'reaction': '🔥', 'users': ['lhoestq', 'abhishek', 'mpsampat', 'Ramikan-BR'], 'count': 4}, {'reaction': '👀', 'users': ['Ramikan-BR', 'papercanteen111'], 'count': 2}, {'reaction': '👍', 'users': ['sourceoftruthdata', 'AndreaDrew'], 'count': 2}, {'reaction': '❤️', 'users': ['Ramikan-BR'], 'count': 1}, {'reaction': '🚀', 'users': ['Ramikan-BR'], 'count': 1}]",2024-05-14 11:28:06,2024-05-14 11:28:06.909,[],/posts/abhishek/196339313433568,2943,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/644a114201e18bf93a6eff8f/rYdjfaJ-4-SlT3PRmChdc.jpeg,11.0,Mouhu,MouhuAI,453935765698578,"[{'type': 'text', 'value': 'Today is Google I/O.', 'raw': 'Today is Google I/O.'}]",Today is Google I/O.,[],[],"[{'reaction': '😎', 'users': ['MouhuAI'], 'count': 1}]",2024-05-14 11:06:59,2024-05-14 11:06:59.249,[],/posts/MouhuAI/453935765698578,1011,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1649143001781-624bebf604abc7ebb01789af.jpeg,4512.0,Apolinário from multimodal AI art,multimodalart,526931068207923,"[{'type': 'text', 'value': 'The first open Stable Diffusion 3-like architecture model is JUST out 💣 - but it is not SD3! 🤔', 'raw': 'The first open Stable Diffusion 3-like architecture model is JUST out 💣 - but it is not SD3! 
🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It is ', 'raw': 'It is '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Tencent-Hunyuan/HunyuanDiT'}, 'url': 'https://huggingface.co/Tencent-Hunyuan/HunyuanDiT', 'raw': 'https://huggingface.co/Tencent-Hunyuan/HunyuanDiT'}, {'type': 'text', 'value': ' by Tencent, a 1.5B parameter DiT (diffusion transformer) text-to-image model 🖼️✨, trained with multi-lingual CLIP + multi-lingual T5 text-encoders for English 🤝 Chinese understanding ', 'raw': ' by Tencent, a 1.5B parameter DiT (diffusion transformer) text-to-image model 🖼️✨, trained with multi-lingual CLIP + multi-lingual T5 text-encoders for English 🤝 Chinese understanding '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out by yourself here ▶️ ', 'raw': 'Try it out by yourself here ▶️ '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/multimodalart/HunyuanDiT', 'raw': 'https://huggingface.co/spaces/multimodalart/HunyuanDiT'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""(a bit too slow as the model is chunky and the research code isn't super optimized for inference speed yet) "", 'raw': ""(a bit too slow as the model is chunky and the research code isn't super optimized for inference speed yet) ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the paper they claim to be SOTA open source based on human preference evaluation! ', 'raw': 'In the paper they claim to be SOTA open source based on human preference evaluation! '}]","The first open Stable Diffusion 3-like architecture model is JUST out 💣 - but it is not SD3! 🤔 + +It is https://huggingface.co/Tencent-Hunyuan/HunyuanDiT by Tencent, a 1.5B parameter DiT (diffusion transformer) text-to-image model 🖼️✨, trained with multi-lingual CLIP + multi-lingual T5 text-encoders for English 🤝 Chinese understanding + +Try it out by yourself here ▶️ https://huggingface.co/spaces/multimodalart/HunyuanDiT +(a bit too slow as the model is chunky and the research code isn't super optimized for inference speed yet) + +In the paper they claim to be SOTA open source based on human preference evaluation! ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/624bebf604abc7ebb01789af/U1RIzViJWgZK_twNS97ZX.png'}]",[],"[{'reaction': '❤️', 'users': ['ruhiparveen', 'ayush7'], 'count': 2}]",2024-05-14 10:29:26,2024-05-14 10:35:49.200,[],/posts/multimodalart/526931068207923,28409,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/65eb213f00f1a613daafd462/uZuhXpD2B2aadwQxoL7DK.jpeg,57.0,Phenix Rhyder,phenixrhyder,629068557683774,"[{'type': 'text', 'value': 'The boy king. This was timeless diffusion I think. Or retrolife. I forget actually, but like the cartoon effect', 'raw': 'The boy king. This was timeless diffusion I think. Or retrolife. 
I forget actually, but like the cartoon effect","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/cq2stL8-BNAJiil9TO8Mp.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/ypdq0JZ9LfHREopPRyeDN.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/2En79FTK4Qe4rx3npdz93.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['qnguyen3'], 'count': 1}]",2024-05-14 10:27:25,2024-05-14 10:27:25.984,[],/posts/phenixrhyder/629068557683774,2680,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg,211.0,Bram Vanroy,BramVanroy,264774721266202,"[{'type': 'text', 'value': 'All my models seem to be plagued by infinite lists. When you ask a question that requires it to write a list, it most often keeps adding bullet points or enumeration. I am wondering whether this is a result of using chatty GPT-4 as DPO preferences. Any thoughts? ', 'raw': 'All my models seem to be plagued by infinite lists. When you ask a question that requires it to write a list, it most often keeps adding bullet points or enumeration. I am wondering whether this is a result of using chatty GPT-4 as DPO preferences. Any thoughts? '}]","All my models seem to be plagued by infinite lists. When you ask a question that requires it to write a list, it most often keeps adding bullet points or enumeration. I am wondering whether this is a result of using chatty GPT-4 as DPO preferences. Any thoughts? ",[],[],"[{'reaction': '👀', 'users': ['victor'], 'count': 1}]",2024-05-14 07:46:10,2024-05-14 10:25:12.385,"[{'_id': '65d883893a52cd9bcd8ab7cf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg', 'fullname': 'leroy Samuel Dyer', 'name': 'LeroyDyer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': False}]",/posts/BramVanroy/264774721266202,2315,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png,122.0,Asankhaya Sharma,codelion,997034868620906,"[{'type': 'text', 'value': 'The new gpt-4o model seems to be a very good coder. OpenAI reported a 90+ score on ', 'raw': 'The new gpt-4o model seems to be a very good coder. OpenAI reported a 90+ score on '}, {'type': 'link', 'href': 'https://huggingface.co/datasets/openai_humaneval', 'raw': 'https://huggingface.co/datasets/openai_humaneval'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We tried the new model on our ', 'raw': 'We tried the new model on our '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'patched-codes/static-analysis-eval'}, 'url': 'https://huggingface.co/datasets/patched-codes/static-analysis-eval', 'raw': 'https://huggingface.co/datasets/patched-codes/static-analysis-eval'}, {'type': 'text', 'value': ' which evaluates the model on vulnerability remediation. 
gpt-4o has reclaimed the top spot on our leaderboard (from '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'meta-llama/Meta-Llama-3-70B-Instruct'}, 'url': 'https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct', 'raw': 'https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct'}, {'type': 'text', 'value': '). ', 'raw': '). '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can now use the new model with our open-source framework PatchWork - ', 'raw': 'You can now use the new model with our open-source framework PatchWork - '}, {'type': 'link', 'href': 'https://github.com/patched-codes/patchwork', 'raw': 'https://github.com/patched-codes/patchwork'}, {'type': 'text', 'value': ' by passing ', 'raw': ' by passing '}, {'type': 'inline_code', 'code': 'model=gpt-4o', 'raw': '`model=gpt-4o`'}, {'type': 'text', 'value': ' on the CLI.', 'raw': ' on the CLI.'}]","The new gpt-4o model seems to be a very good coder. OpenAI reported a 90+ score on https://huggingface.co/datasets/openai_humaneval + +We tried the new model on our https://huggingface.co/datasets/patched-codes/static-analysis-eval which evaluates the model on vulnerability remediation. gpt-4o has reclaimed the top spot on our leaderboard (from https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct). + +You can now use the new model with our open-source framework PatchWork - https://github.com/patched-codes/patchwork by passing `model=gpt-4o` on the CLI.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f32eab52ad88c930bb3f3b/bN7hSZDSw4x_lBfHbT0YE.png'}]",[],"[{'reaction': '👍', 'users': ['codelion', 'Taylor658', 'victor', 'tayo6', 'odyss3y', 'Abdollahi'], 'count': 6}, {'reaction': '🔥', 'users': ['codelion', 'victor', 'DmitryRyumin'], 'count': 3}, {'reaction': '🚀', 'users': ['codelion'], 'count': 1}]",2024-05-14 04:34:30,2024-05-15 17:54:35.428,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '62f32eab52ad88c930bb3f3b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png', 'fullname': 'Asankhaya Sharma', 'name': 'codelion', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 122, 'isFollowing': False}, {'_id': '641b754d1911d3be6745cce9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/DxjZG1XT4H3ZHF7qHxWxk.jpeg', 'fullname': 'atayloraerospace', 'name': 'Taylor658', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 112, 'isFollowing': False}]",/posts/codelion/997034868620906,1797,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,431728663194168,"[{'type': 'text', 'value': 'Is GPT-4o everything you expected? 🤔', 'raw': 'Is GPT-4o everything you expected? 
🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'OpenAI', 'raw': '@OpenAI'}, {'type': 'text', 'value': ' has gone omni (GPT-4""o"" 🌐), a multimodal LLM, it accepts as input any combination of text, audio, and image and generates any combination of text, audio, and image outputs. 🎤📸✏️', 'raw': ' has gone omni (GPT-4""o"" 🌐), a multimodal LLM, it accepts as input any combination of text, audio, and image and generates any combination of text, audio, and image outputs. 🎤📸✏️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ Based on the examples seen:', 'raw': '1️⃣ Based on the examples seen:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Inputs possible are Text ✏️, Text + Image 📝🖼️, Text + Audio 📝🎧, Text + Video 📝🎥, Audio 🎧', 'raw': 'Inputs possible are Text ✏️, Text + Image 📝🖼️, Text + Audio 📝🎧, Text + Video 📝🎥, Audio 🎧'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and outputs possible are Image 🖼️, Image + Text 🖼️📝, Text 📝, Audio 🎧', 'raw': 'and outputs possible are Image 🖼️, Image + Text 🖼️📝, Text 📝, Audio 🎧'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ 88.7% on MMLU 🏆; 90.2% on HumanEval (best in class) 🥇', 'raw': '2️⃣ 88.7% on MMLU 🏆; 90.2% on HumanEval (best in class) 🥇'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Up to 50% cheaper 💸 and 2x faster ⚡ than GPT-4 Turbo', 'raw': '3️⃣ Up to 50% cheaper 💸 and 2x faster ⚡ than GPT-4 Turbo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4️⃣ GPT-4o will be available in the free tier of ChatGPT 🎉', 'raw': '4️⃣ GPT-4o will be available in the free tier of ChatGPT 🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5️⃣ Near real-time audio with 320ms on average, similar to human conversation 🗣️**', 'raw': '5️⃣ Near real-time audio with 320ms on average, similar to human conversation 🗣️**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '6️⃣ New tokenizer with a 200k token vocabulary 📚 (previously 100k vocabulary) leading to 1.1x - 4.4x fewer tokens needed across 20 languages 🌍', 'raw': '6️⃣ New tokenizer with a 200k token vocabulary 📚 (previously 100k vocabulary) leading to 1.1x - 4.4x fewer tokens needed across 20 languages 🌍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '7️⃣ Tokenizer compression and more efficient across non-English languages (3-5 times fewer tokens for major Indian languages 🇮🇳)', 'raw': '7️⃣ Tokenizer compression and more efficient across non-English languages (3-5 times fewer tokens for major Indian languages 🇮🇳)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👐Open questions:', 'raw': '👐Open questions:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What is the context length? ❓', 'raw': '- What is the context length? ❓'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Why does GPT-4 still exist, if GPT-4o is better, faster, and cheaper? 🤨', 'raw': '- Why does GPT-4 still exist, if GPT-4o is better, faster, and cheaper? 
🤨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog: ', 'raw': 'Blog: '}, {'type': 'link', 'href': 'https://openai.com/index/hello-gpt-4o/', 'raw': 'https://openai.com/index/hello-gpt-4o/'}, {'type': 'text', 'value': ' 🌐', 'raw': ' 🌐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Available today: https://chatgpt.com/ 🚀', 'raw': 'Available today: https://chatgpt.com/ 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I just wanted it to be cheaper, and more accessible! 🙌', 'raw': 'I just wanted it to be cheaper, and more accessible! 🙌'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Still not open source, but a price reduction is welcome! 💰', 'raw': 'Still not open source, but a price reduction is welcome! 💰'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also, something fun happened, for the first 10-15 mins all search engines were correcting GPT-4o to GPT-4 😂', 'raw': 'Also, something fun happened, for the first 10-15 mins all search engines were correcting GPT-4o to GPT-4 😂'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also, also, GPT-4o is the model which was powering the GPT2 chatbot in the LMsys arena (ELO 1310 vs. 1253 for GPT-4 Turbo) 🏅', 'raw': 'Also, also, GPT-4o is the model which was powering the GPT2 chatbot in the LMsys arena (ELO 1310 vs. 1253 for GPT-4 Turbo) 🏅'}, {'type': 'new_line', 'raw': '\n'}]","Is GPT-4o everything you expected? 🤔 + +@OpenAI has gone omni (GPT-4""o"" 🌐), a multimodal LLM, it accepts as input any combination of text, audio, and image and generates any combination of text, audio, and image outputs. 🎤📸✏️ + +1️⃣ Based on the examples seen: +Inputs possible are Text ✏️, Text + Image 📝🖼️, Text + Audio 📝🎧, Text + Video 📝🎥, Audio 🎧 +and outputs possible are Image 🖼️, Image + Text 🖼️📝, Text 📝, Audio 🎧 + +2️⃣ 88.7% on MMLU 🏆; 90.2% on HumanEval (best in class) 🥇 + +3️⃣ Up to 50% cheaper 💸 and 2x faster ⚡ than GPT-4 Turbo + +4️⃣ GPT-4o will be available in the free tier of ChatGPT 🎉 + +5️⃣ Near real-time audio with 320ms on average, similar to human conversation 🗣️** + +6️⃣ New tokenizer with a 200k token vocabulary 📚 (previously 100k vocabulary) leading to 1.1x - 4.4x fewer tokens needed across 20 languages 🌍 + +7️⃣ Tokenizer compression and more efficient across non-English languages (3-5 times fewer tokens for major Indian languages 🇮🇳) + +👐Open questions: +- What is the context length? ❓ +- Why does GPT-4 still exist, if GPT-4o is better, faster, and cheaper? 🤨 + +Blog: https://openai.com/index/hello-gpt-4o/ 🌐 +Available today: https://chatgpt.com/ 🚀 + +I just wanted it to be cheaper, and more accessible! 🙌 + +Still not open source, but a price reduction is welcome! 💰 + +Also, something fun happened, for the first 10-15 mins all search engines were correcting GPT-4o to GPT-4 😂 + +Also, also, GPT-4o is the model which was powering the GPT2 chatbot in the LMsys arena (ELO 1310 vs. 
1253 for GPT-4 Turbo) 🏅 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/5F48aMGYG_BDy_KojJodY.png'}]",[],"[{'reaction': '🔥', 'users': ['julesbsz', 'vihangsharma', 'KingNish', 'Taylor658', 'victor', 'Felladrin'], 'count': 6}, {'reaction': '👀', 'users': ['clem'], 'count': 1}]",2024-05-13 20:21:41,2024-05-25 18:45:42.197,"[{'_id': '6612aedf09f16e7347dfa7e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg', 'fullname': 'Nishith Jain', 'name': 'KingNish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1222, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}, {'_id': '61ebf3544fae7b86fdadb923', 'avatarUrl': '/avatars/9fbbb8eaaa7b19752b336cf228d4679e.svg', 'fullname': 'lucasjin', 'name': 'lucasjin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 16, 'isFollowing': False}, {'_id': '64d71ab4089bc502ceb44d29', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64d71ab4089bc502ceb44d29/nnacD7gbRSMbxCYBqkRYX.png', 'fullname': 'Eugenio Schiavoni', 'name': 'Kukedlc', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 80, 'isFollowing': False}, {'_id': '664c08f5bfd9b93ba4e3c398', 'avatarUrl': '/avatars/63a75476f74d55cf00a0b12a26464bcc.svg', 'fullname': 'adamsmith', 'name': 'adamsmith887', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '64d3f424aea0ccb1b49b2409', 'avatarUrl': '/avatars/6a1edd15ada4d2013fd0754b636c0c22.svg', 'fullname': 'A', 'name': 'Kilaril', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/singhsidhukuldeep/431728663194168,3782,,7 +https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png,2266.0,Tom Aarsen,tomaarsen,866929034800968,"[{'type': 'text', 'value': ""NuMind has just released 3 new state-of-the-art GLiNER models for Named Entity Recognition/Information Extraction. These GLiNER models allow you to specify any label that you want, and it'll find spans in the text corresponding to your label. It's been shown to work quite well on unusual domains, e.g. celestial entities in my picture."", 'raw': ""NuMind has just released 3 new state-of-the-art GLiNER models for Named Entity Recognition/Information Extraction. These GLiNER models allow you to specify any label that you want, and it'll find spans in the text corresponding to your label. It's been shown to work quite well on unusual domains, e.g. 
celestial entities in my picture.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'There are 3 models released:', 'raw': 'There are 3 models released:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'numind/NuNER_Zero'}, 'url': 'https://huggingface.co/numind/NuNER_Zero', 'raw': 'https://huggingface.co/numind/NuNER_Zero'}, {'type': 'text', 'value': ':', 'raw': ':'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' The primary model, SOTA & can detect really long entities.', 'raw': ' The primary model, SOTA & can detect really long entities.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'numind/NuNER_Zero-span'}, 'url': 'https://huggingface.co/numind/NuNER_Zero-span', 'raw': 'https://huggingface.co/numind/NuNER_Zero-span'}, {'type': 'text', 'value': ':', 'raw': ':'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': "" Slightly better performance than NuNER Zero, but can't detect entities longer than 12 tokens."", 'raw': "" Slightly better performance than NuNER Zero, but can't detect entities longer than 12 tokens.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'numind/NuNER_Zero-4k'}, 'url': 'https://huggingface.co/numind/NuNER_Zero-4k', 'raw': 'https://huggingface.co/numind/NuNER_Zero-4k'}, {'type': 'text', 'value': ':', 'raw': ':'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Slightly worse than NuNER Zero, but has a context length of 4k tokens.', 'raw': ' Slightly worse than NuNER Zero, but has a context length of 4k tokens.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Some more details about these models in general:', 'raw': 'Some more details about these models in general:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- They are *really* small, orders of magnitude smaller than LLMs, which don't reach this level of performance."", 'raw': ""- They are *really* small, orders of magnitude smaller than LLMs, which don't reach this level of performance.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Because they're small - they're fast: <1s per sentence on free GPUs."", 'raw': ""- Because they're small - they're fast: <1s per sentence on free GPUs.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- They have an MIT license: free commercial usage.', 'raw': '- They have an MIT license: free commercial usage.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try out the demo here: ', 'raw': 'Try out the demo here: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/numind/NuZero', 'raw': 'https://huggingface.co/spaces/numind/NuZero'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Or check out all of the models here: ', 'raw': 'Or check out all of the models here: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'numind/nunerzero-zero-shot-ner-662b59803b9b438ff56e49e2'}, 'url': 'https://huggingface.co/collections/numind/nunerzero-zero-shot-ner-662b59803b9b438ff56e49e2', 
'raw': 'https://huggingface.co/collections/numind/nunerzero-zero-shot-ner-662b59803b9b438ff56e49e2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If there's ever a need for me to extract some information from any text: I'll be using these. Great work "", 'raw': ""If there's ever a need for me to extract some information from any text: I'll be using these. Great work ""}, {'type': 'mention', 'user': 'Serega6678', 'raw': '@Serega6678'}, {'type': 'text', 'value': '!', 'raw': '!'}]","NuMind has just released 3 new state-of-the-art GLiNER models for Named Entity Recognition/Information Extraction. These GLiNER models allow you to specify any label that you want, and it'll find spans in the text corresponding to your label. It's been shown to work quite well on unusual domains, e.g. celestial entities in my picture. + +There are 3 models released: +- https://huggingface.co/numind/NuNER_Zero: + The primary model, SOTA & can detect really long entities. +- https://huggingface.co/numind/NuNER_Zero-span: + Slightly better performance than NuNER Zero, but can't detect entities longer than 12 tokens. +- https://huggingface.co/numind/NuNER_Zero-4k: + Slightly worse than NuNER Zero, but has a context length of 4k tokens. + +Some more details about these models in general: +- They are *really* small, orders of magnitude smaller than LLMs, which don't reach this level of performance. +- Because they're small - they're fast: <1s per sentence on free GPUs. +- They have an MIT license: free commercial usage. + +Try out the demo here: https://huggingface.co/spaces/numind/NuZero +Or check out all of the models here: https://huggingface.co/collections/numind/nunerzero-zero-shot-ner-662b59803b9b438ff56e49e2 + +If there's ever a need for me to extract some information from any text: I'll be using these. 
Great work @Serega6678!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6317233cc92fd6fee317e030/pwlJCY17R2A35M3iTLYia.png'}]","[{'_id': '63ff08f012c51862e5d6c769', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63ff08f012c51862e5d6c769/D0NG7kodkhCG0zzS0NRNS.jpeg', 'fullname': 'Sergei Bogdanov', 'name': 'Serega6678', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9}]","[{'reaction': '🔥', 'users': ['osanseviero', 'mdouglas', 'clem', 'MoritzLaurer', 'julien-c', 'andrewrreed', 'nickandbro'], 'count': 7}, {'reaction': '🚀', 'users': ['deepsh2207', 'Qagent', 'julien-c', 'andrewrreed'], 'count': 4}]",2024-05-13 19:14:50,2024-05-14 07:50:36.379,"[{'_id': '6317233cc92fd6fee317e030', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png', 'fullname': 'Tom Aarsen', 'name': 'tomaarsen', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2266, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}]",/posts/tomaarsen/866929034800968,2442,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/612246596d9ce900691744d2/9DlHVQDqblKz7QPTA6nDa.jpeg,44.0,Edoardo Federici,efederici,219096989628340,"[{'type': 'text', 'value': 'Finally, I can post! 🚀', 'raw': 'Finally, I can post! 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I created a Capybara-inspired Italian dataset by translating the initial instruction and running it through a pipeline to generate conversations. I used Claude Sonnet for translation and instruction generation, and Opus for generating the answers.', 'raw': 'I created a Capybara-inspired Italian dataset by translating the initial instruction and running it through a pipeline to generate conversations. I used Claude Sonnet for translation and instruction generation, and Opus for generating the answers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I hope this dataset proves useful for people working on 🇮🇹 language models.', 'raw': 'I hope this dataset proves useful for people working on 🇮🇹 language models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⛁ Open sourcing the dataset here: ', 'raw': '⛁ Open sourcing the dataset here: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'efederici/capybara-claude-15k-ita'}, 'url': 'https://huggingface.co/datasets/efederici/capybara-claude-15k-ita', 'raw': 'https://huggingface.co/datasets/efederici/capybara-claude-15k-ita'}]","Finally, I can post! 🚀 + +I created a Capybara-inspired Italian dataset by translating the initial instruction and running it through a pipeline to generate conversations. I used Claude Sonnet for translation and instruction generation, and Opus for generating the answers. + +I hope this dataset proves useful for people working on 🇮🇹 language models. 
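+
+For the curious, a minimal sketch of a two-stage pipeline in that spirit (the prompts and per-model roles here are illustrative, not the exact ones used to build this dataset):
+
+```
+import anthropic  # pip install anthropic; reads ANTHROPIC_API_KEY from the environment
+
+client = anthropic.Anthropic()
+
+def ask(model: str, prompt: str) -> str:
+    # One round trip through the Messages API; returns the text of the reply.
+    msg = client.messages.create(
+        model=model,
+        max_tokens=1024,
+        messages=[{"role": "user", "content": prompt}],
+    )
+    return msg.content[0].text
+
+seed = "Explain the difference between a list and a tuple in Python."
+# Stage 1: Sonnet translates the seed instruction into Italian.
+istruzione = ask("claude-3-sonnet-20240229", f"Translate this instruction to Italian: {seed}")
+# Stage 2: Opus answers the translated instruction.
+risposta = ask("claude-3-opus-20240229", istruzione)
+print(istruzione, risposta, sep="\n---\n")
+```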
+ +⛁ Open sourcing the dataset here: https://huggingface.co/datasets/efederici/capybara-claude-15k-ita",[],[],"[{'reaction': '🔥', 'users': ['giux78', 'shtefcs'], 'count': 2}, {'reaction': '❤️', 'users': ['anakin87'], 'count': 1}]",2024-05-10 18:38:57,2024-10-13 10:44:04.777,"[{'_id': '6708d10f793a1fcd6c7633fb', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/TwR65k1JgO_t3l4pM1UjA.png', 'fullname': 'Stefan Smiljkovic', 'name': 'shtefcs', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 96, 'isFollowing': False}]",/posts/efederici/219096989628340,1933,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,537495823373219,"[{'type': 'text', 'value': '😀😲😐😡 New Research Alert - FER-YOLO-Mamba (Facial Expressions Recognition Collection)! 😡😥🥴😱', 'raw': '😀😲😐😡 New Research Alert - FER-YOLO-Mamba (Facial Expressions Recognition Collection)! 😡😥🥴😱'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: FER-YOLO-Mamba: Facial Expression Detection and Classification Based on Selective State Space 🔝', 'raw': '📄 Title: FER-YOLO-Mamba: Facial Expression Detection and Classification Based on Selective State Space 🔝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: FER-YOLO-Mamba is a novel facial expression recognition model that combines the strengths of YOLO and Mamba technologies to efficiently recognize and localize facial expressions.', 'raw': '📝 Description: FER-YOLO-Mamba is a novel facial expression recognition model that combines the strengths of YOLO and Mamba technologies to efficiently recognize and localize facial expressions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Hui Ma, Sen Lei, Turgay Celik, and Heng-Chao Li', 'raw': '👥 Authors: Hui Ma, Sen Lei, Turgay Celik, and Heng-Chao Li'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.01828'}, 'url': 'https://huggingface.co/papers/2405.01828', 'raw': 'https://huggingface.co/papers/2405.01828', 'label': 'FER-YOLO-Mamba: Facial Expression Detection and Classification Based on\n Selective State Space (2405.01828)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Repository: ', 'raw': '📁 Repository: '}, {'type': 'link', 'href': 'https://github.com/SwjtuMa/FER-YOLO-Mamba', 'raw': 'https://github.com/SwjtuMa/FER-YOLO-Mamba'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Facial Expressions Recognition Collection: ', 'raw': '🚀 Added to the Facial Expressions Recognition Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/facial-expressions-recognition-65f22574e0724601636ddaf7'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/facial-expressions-recognition-65f22574e0724601636ddaf7', 'raw': 'https://huggingface.co/collections/DmitryRyumin/facial-expressions-recognition-65f22574e0724601636ddaf7'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥🔝 See also Facial_Expression_Recognition - ', 'raw': '🔥🔝 See also 
Facial_Expression_Recognition - '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ElenaRyumina/Facial_Expression_Recognition'}, 'url': 'https://huggingface.co/spaces/ElenaRyumina/Facial_Expression_Recognition', 'raw': 'https://huggingface.co/spaces/ElenaRyumina/Facial_Expression_Recognition'}, {'type': 'text', 'value': ' (App, co-authored by ', 'raw': ' (App, co-authored by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'text', 'value': ') 😉', 'raw': ') 😉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #FERYOLOMamba #FER #YOLO #Mamba #FacialExpressionRecognition #EmotionRecognition #ComputerVision #DeepLearning #MachineLearning #Innovation', 'raw': '🔍 Keywords: #FERYOLOMamba #FER #YOLO #Mamba #FacialExpressionRecognition #EmotionRecognition #ComputerVision #DeepLearning #MachineLearning #Innovation'}]","😀😲😐😡 New Research Alert - FER-YOLO-Mamba (Facial Expressions Recognition Collection)! 😡😥🥴😱 +📄 Title: FER-YOLO-Mamba: Facial Expression Detection and Classification Based on Selective State Space 🔝 + +📝 Description: FER-YOLO-Mamba is a novel facial expression recognition model that combines the strengths of YOLO and Mamba technologies to efficiently recognize and localize facial expressions. 
+ +👥 Authors: Hui Ma, Sen Lei, Turgay Celik, and Heng-Chao Li + +🔗 Paper: https://huggingface.co/papers/2405.01828 + +📁 Repository: https://github.com/SwjtuMa/FER-YOLO-Mamba + +🚀 Added to the Facial Expressions Recognition Collection: https://huggingface.co/collections/DmitryRyumin/facial-expressions-recognition-65f22574e0724601636ddaf7 + +🔥🔝 See also Facial_Expression_Recognition - https://huggingface.co/spaces/ElenaRyumina/Facial_Expression_Recognition (App, co-authored by @DmitryRyumin) 😉 + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🔍 Keywords: #FERYOLOMamba #FER #YOLO #Mamba #FacialExpressionRecognition #EmotionRecognition #ComputerVision #DeepLearning #MachineLearning #Innovation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/NrFECOXGkk360Lc2wvgJi.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/3dolpVrZmFAev5jLZQ5mS.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/U6bZW4AeD5K0WURJhxQ18.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/vy8zBehhViuC5C4vfXZAQ.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/jlNN2HK_nQdJHXNY1bk88.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/uZTE2vGYCBAbhhhJn8w24.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Y4OtSm8sTUDAKXpn0Z5xq.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/DRbqlhjuctz-cZS08z8wp.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}]","[{'reaction': '🔥', 'users': ['DmitryRyumin', 'samusenps', 'lysandre', 'philipp-zettl'], 'count': 4}, {'reaction': '🤗', 'users': ['DmitryRyumin', 'lysandre'], 'count': 2}, {'reaction': '❤️', 'users': ['samusenps', 'merterbak'], 'count': 2}]",2024-05-10 18:33:43,2024-05-10 18:33:43.867,[],/posts/DmitryRyumin/537495823373219,1244,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6511868c5e3bcde19c5c6dd3/SkzqAEb7akK0Wwyv5Vavy.png,2.0,Mandela Logan,mandelakori,340990784940329,"[{'type': 'text', 'value': ""We are excited to share the latest advancement in the AISAK system: the introduction of AISAK-Detect. As an essential component of AISAK-Visual, this sophisticated model specializes in object detection tasks, significantly enhancing our system's capabilities in comprehensive visual analysis."", 'raw': ""We are excited to share the latest advancement in the AISAK system: the introduction of AISAK-Detect. 
As an essential component of AISAK-Visual, this sophisticated model specializes in object detection tasks, significantly enhancing our system's capabilities in comprehensive visual analysis.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AISAK-Detect is built on an encoder-decoder transformer architecture with a convolutional backbone, ensuring accurate and efficient object detection within images. Our dedicated team has meticulously trained and fine-tuned this model to guarantee seamless integration into the broader AISAK ecosystem, contributing to cohesive performance in image analysis tasks.', 'raw': 'AISAK-Detect is built on an encoder-decoder transformer architecture with a convolutional backbone, ensuring accurate and efficient object detection within images. Our dedicated team has meticulously trained and fine-tuned this model to guarantee seamless integration into the broader AISAK ecosystem, contributing to cohesive performance in image analysis tasks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The deployment of AISAK-Detect is a significant milestone in our journey towards offering a comprehensive AI solution across multiple domains. With a unique deployment approach that prioritizes achieving an optimal system, we are committed to delivering an AI experience that goes beyond the limitations of traditional chat instances.', 'raw': 'The deployment of AISAK-Detect is a significant milestone in our journey towards offering a comprehensive AI solution across multiple domains. With a unique deployment approach that prioritizes achieving an optimal system, we are committed to delivering an AI experience that goes beyond the limitations of traditional chat instances.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Regular updates on the progress of the AISAK system, including the deployment of AISAK-Detect, will be provided to keep users informed about the advancements being made. We look forward to sharing more exciting developments as we continue to grow and innovate.', 'raw': 'Regular updates on the progress of the AISAK system, including the deployment of AISAK-Detect, will be provided to keep users informed about the advancements being made. We look forward to sharing more exciting developments as we continue to grow and innovate.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'aisak-ai/aisak-detect'}, 'url': 'https://huggingface.co/aisak-ai/aisak-detect', 'raw': 'https://huggingface.co/aisak-ai/aisak-detect'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'aisak-ai/aisak-65ddeeb08d0978de6114702f'}, 'url': 'https://huggingface.co/collections/aisak-ai/aisak-65ddeeb08d0978de6114702f', 'raw': 'https://huggingface.co/collections/aisak-ai/aisak-65ddeeb08d0978de6114702f'}]","We are excited to share the latest advancement in the AISAK system: the introduction of AISAK-Detect. As an essential component of AISAK-Visual, this sophisticated model specializes in object detection tasks, significantly enhancing our system's capabilities in comprehensive visual analysis. 
+ +AISAK-Detect is built on an encoder-decoder transformer architecture with a convolutional backbone, ensuring accurate and efficient object detection within images. Our dedicated team has meticulously trained and fine-tuned this model to guarantee seamless integration into the broader AISAK ecosystem, contributing to cohesive performance in image analysis tasks. + +The deployment of AISAK-Detect is a significant milestone in our journey towards offering a comprehensive AI solution across multiple domains. With a unique deployment approach that prioritizes achieving an optimal system, we are committed to delivering an AI experience that goes beyond the limitations of traditional chat instances. + +Regular updates on the progress of the AISAK system, including the deployment of AISAK-Detect, will be provided to keep users informed about the advancements being made. We look forward to sharing more exciting developments as we continue to grow and innovate. + + +https://huggingface.co/aisak-ai/aisak-detect + +https://huggingface.co/collections/aisak-ai/aisak-65ddeeb08d0978de6114702f",[],[],[],2024-05-10 17:39:06,2024-05-10 17:39:37.834,[],/posts/mandelakori/340990784940329,955,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/DxjZG1XT4H3ZHF7qHxWxk.jpeg,112.0,atayloraerospace,Taylor658,713300768980213,"[{'type': 'text', 'value': '🤗The first submissions from the Community Hugging Face Computer Vision Course (', 'raw': '🤗The first submissions from the Community Hugging Face Computer Vision Course ('}, {'type': 'link', 'href': 'https://huggingface.co/learn/computer-vision-course/unit0/welcome/welcome', 'raw': 'https://huggingface.co/learn/computer-vision-course/unit0/welcome/welcome'}, {'type': 'text', 'value': ') are being posted up on HF Spaces!🤗', 'raw': ') are being posted up on HF Spaces!🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'OmAlve/Swin-Transformer-Foods101'}, 'url': 'https://huggingface.co/spaces/OmAlve/Swin-Transformer-Foods101', 'raw': 'https://huggingface.co/spaces/OmAlve/Swin-Transformer-Foods101'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Rageshhf/medi-classifier'}, 'url': 'https://huggingface.co/spaces/Rageshhf/medi-classifier', 'raw': 'https://huggingface.co/spaces/Rageshhf/medi-classifier'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' It is amazing that the first group of students has completed the course and in record time!', 'raw': ' It is amazing that the first group of students has completed the course and in record time!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Will look forward to seeing more submissions from the course soon.', 'raw': 'Will look forward to seeing more submissions from the course soon.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A nice swag item that students get when they complete the course and make their submission is this cool Hugging Face Certificate of Completion. (Its suitable for framing) 🤗', 'raw': 'A nice swag item that students get when they complete the course and make their submission is this cool Hugging Face Certificate of Completion. 
(It's suitable for framing) 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👇', 'raw': '👇'}]","🤗The first submissions from the Community Hugging Face Computer Vision Course (https://huggingface.co/learn/computer-vision-course/unit0/welcome/welcome) are being posted up on HF Spaces!🤗 + +https://huggingface.co/spaces/OmAlve/Swin-Transformer-Foods101 +https://huggingface.co/spaces/Rageshhf/medi-classifier + + It is amazing that the first group of students has completed the course and in record time! + +Will look forward to seeing more submissions from the course soon. + +A nice swag item that students get when they complete the course and make their submission is this cool Hugging Face Certificate of Completion. (It's suitable for framing) 🤗 +👇","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/641b754d1911d3be6745cce9/4p_YK-vpIrT_GJZqrQJgc.png'}]",[],"[{'reaction': '🔥', 'users': ['johko', 'heisenberg3376'], 'count': 2}]",2024-05-10 16:05:33,2024-05-10 17:59:30.078,"[{'_id': '64fc070cc7f04f7cee8fa668', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/o4D0uRvnGCpawb8oU8tNN.jpeg', 'fullname': 'Om Uday Alve', 'name': 'OmAlve', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}]",/posts/Taylor658/713300768980213,1175,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png,365.0,Kuldeep Singh Sidhu,singhsidhukuldeep,497450359563909,"[{'type': 'text', 'value': 'Are you tired of writing scripts to scrape data from the web? 😓', 'raw': 'Are you tired of writing scripts to scrape data from the web? 😓'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ScrapeGraphAI is here for you! 🎉', 'raw': 'ScrapeGraphAI is here for you! 🎉'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ScrapeGraphAI is an OPEN-SOURCE web scraping Python library that uses LLM and direct graph logic to create scraping pipelines for websites and local documents (XML, HTML, JSON, etc.). 🌐📊', 'raw': 'ScrapeGraphAI is an OPEN-SOURCE web scraping Python library that uses LLM and direct graph logic to create scraping pipelines for websites and local documents (XML, HTML, JSON, etc.). 🌐📊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Just say which information you want to extract (in human language) and the library will do it for you! 🗣️🚀', 'raw': 'Just say which information you want to extract (in human language) and the library will do it for you! 🗣️🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It supports GPT, Gemini, and open-source models like Mistral. 
🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A few things that I could not find in the docs but would be amazing to see 🤞:', 'raw': 'A few things that I could not find in the docs but would be amazing to see 🤞:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Captcha handling 🔐', 'raw': '- Captcha handling 🔐'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Persistent data output formatting 📁', 'raw': '- Persistent data output formatting 📁'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Streaming output 📡', 'raw': '- Streaming output 📡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Explanation😂 of the tag line: ""ScrapeGraphAI: You Only Scrape Once"" What does that even mean? 🤣 Is this YOLO? 🤔', 'raw': '- Explanation😂 of the tag line: ""ScrapeGraphAI: You Only Scrape Once"" What does that even mean? 🤣 Is this YOLO? 🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link: ', 'raw': 'Link: '}, {'type': 'link', 'href': 'https://github.com/VinciGit00/Scrapegraph-ai', 'raw': 'https://github.com/VinciGit00/Scrapegraph-ai'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo code: ', 'raw': 'Demo code: '}, {'type': 'link', 'href': 'https://github.com/amrrs/scrapegraph-code/blob/main/sourcegraph.ipynb', 'raw': 'https://github.com/amrrs/scrapegraph-code/blob/main/sourcegraph.ipynb'}]","Are you tired of writing scripts to scrape data from the web? 😓 + +ScrapeGraphAI is here for you! 🎉 + +ScrapeGraphAI is an OPEN-SOURCE web scraping Python library that uses LLM and direct graph logic to create scraping pipelines for websites and local documents (XML, HTML, JSON, etc.). 🌐📊 + +Just say which information you want to extract (in human language) and the library will do it for you! 🗣️🚀 + +It supports GPT, Gemini, and open-source models like Mistral. 🔍 + +A few things that I could not find in the docs but would be amazing to see 🤞: +- Captcha handling 🔐 +- Persistent data output formatting 📁 +- Streaming output 📡 +- Explanation😂 of the tag line: ""ScrapeGraphAI: You Only Scrape Once"" What does that even mean? 🤣 Is this YOLO? 
🤔 + +Link: https://github.com/VinciGit00/Scrapegraph-ai +Demo code: https://github.com/amrrs/scrapegraph-code/blob/main/sourcegraph.ipynb","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/662bf5bfe93bb73804ef9344/ZIDTbsDxfI28gzsnvAwHn.webp'}]",[],"[{'reaction': '👍', 'users': ['svjack', 'alobnayis', 'julesbsz', 'victor', 'Michielo', 'XiangJinYu', 'Taylor658', 'orkut', 'boapps', 'johnlockejrr', 'Norod78', 'Subash07', 'shtefcs'], 'count': 13}, {'reaction': '🔥', 'users': ['KingNish', 'Subash07'], 'count': 2}, {'reaction': '🤗', 'users': ['johnlockejrr', 'Subash07'], 'count': 2}]",2024-05-10 13:09:41,2024-11-27 06:56:34.123,"[{'_id': '6461bdd6cd9ba6a317c89425', 'avatarUrl': '/avatars/235d54b867ebc959639a8a7fd004b4b5.svg', 'fullname': 'scapking', 'name': 'scapking', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '662bf5bfe93bb73804ef9344', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png', 'fullname': 'Kuldeep Singh Sidhu', 'name': 'singhsidhukuldeep', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 365, 'isFollowing': False}, {'_id': '663f208929be04778bb3d4c7', 'avatarUrl': '/avatars/63e629014c2693ead457a4181d5982b7.svg', 'fullname': 'R.ML', 'name': 'zebfaddd', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '66751fe8a845e4470f31c4f5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/8EA-1YIDSgRS3GfrKTJBa.png', 'fullname': 'Barry robbins', 'name': 'Barryrobbins64', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6670228bd8101e70bd8ce74c', 'avatarUrl': '/avatars/5f1b6e62b4ef66973d842aac5a836d06.svg', 'fullname': 'Recep Serit', 'name': 'rserit', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/singhsidhukuldeep/497450359563909,2459,,6 +/avatars/efc6a9cb98a6b485f7bcb11e5b7b143f.svg,3.0,Grace Smith,BoredApeYachtClub,814931549145861,"[{'type': 'text', 'value': 'This is a song from imagine dragons believer.', 'raw': 'This is a song from imagine dragons believer.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This song is to say thanks to hf team for accepting me! I was waiting for a long time.', 'raw': 'This song is to say thanks to hf team for accepting me! I was waiting for a long time.'}, {'type': 'new_line', 'raw': '\n'}]","This is a song from imagine dragons believer. +This song is to say thanks to hf team for accepting me! I was waiting for a long time. 
+","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6617efcf0ca79090cd6b21e3/sBDwHtsl2nojgjxEo4huF.mp4'}]",[],[],2024-05-10 12:22:56,2024-05-15 14:27:10.160,"[{'_id': '6617efcf0ca79090cd6b21e3', 'avatarUrl': '/avatars/efc6a9cb98a6b485f7bcb11e5b7b143f.svg', 'fullname': 'Grace Smith', 'name': 'BoredApeYachtClub', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/BoredApeYachtClub/814931549145861,1098,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg,2185.0,Hafedh Hichri,not-lain,369756215025167,"[{'type': 'text', 'value': '🥳celebrating 5K readers in one of my blog posts 🥳', 'raw': '🥳celebrating 5K readers in one of my blog posts 🥳'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I came back with another one this time 🤓', 'raw': 'I came back with another one this time 🤓'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'in this blog you will learn 📖 :', 'raw': 'in this blog you will learn 📖 :'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* How to train custom AI models with the trainer API 🚀', 'raw': '* How to train custom AI models with the trainer API 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* integrate your AI models with HF using the mixin classes 🔥', 'raw': '* integrate your AI models with HF using the mixin classes 🔥'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'happy reading everyone 🤗', 'raw': 'happy reading everyone 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗link: ', 'raw': '🔗link: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/not-lain/trainer-api-and-mixin-classes', 'raw': 'https://huggingface.co/blog/not-lain/trainer-api-and-mixin-classes'}, {'type': 'new_line', 'raw': '\n'}]","🥳celebrating 5K readers in one of my blog posts 🥳 +I came back with another one this time 🤓 +in this blog you will learn 📖 : +* How to train custom AI models with the trainer API 🚀 +* integrate your AI models with HF using the mixin classes 🔥 + +happy reading everyone 🤗 +🔗link: https://huggingface.co/blog/not-lain/trainer-api-and-mixin-classes +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6527e89a8808d80ccff88b7a/O1KarGHpOx7MGPsz2VZlL.png'}]",[],"[{'reaction': '❤️', 'users': ['BoredApeYachtClub', 'victor', 'KvrParaskevi', 'Ramikan-BR', 'DESUCLUB'], 'count': 5}, {'reaction': '🔥', 'users': ['singhsidhukuldeep', 'Ramikan-BR'], 'count': 2}, {'reaction': '🚀', 'users': ['Ramikan-BR'], 'count': 1}, {'reaction': '👀', 'users': ['Ramikan-BR'], 'count': 1}]",2024-05-10 12:16:34,2024-05-10 13:44:41.367,"[{'_id': '662bf5bfe93bb73804ef9344', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/662bf5bfe93bb73804ef9344/WXYLnjjJ4SROkoveIi7If.png', 'fullname': 'Kuldeep Singh Sidhu', 'name': 'singhsidhukuldeep', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 365, 'isFollowing': False}, {'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': 
False}]",/posts/not-lain/369756215025167,1244,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg,161.0,Feynman Innovations,ajibawa-2023,997218771113601,"[{'type': 'text', 'value': 'Thank you very much hf team for accepting me! I was waiting for very long time. Thank you ', 'raw': 'Thank you very much hf team for accepting me! I was waiting for very long time. Thank you '}]",Thank you very much hf team for accepting me! I was waiting for very long time. Thank you ,[],[],"[{'reaction': '🚀', 'users': ['merterbak', 'damerajee', 'a9i'], 'count': 3}, {'reaction': '🤗', 'users': ['BoredApeYachtClub', 'mujun', 'damerajee'], 'count': 3}, {'reaction': '👍', 'users': ['iafy', 'PlanetDOGE'], 'count': 2}]",2024-05-10 12:08:54,2024-05-10 12:08:54.620,[],/posts/ajibawa-2023/997218771113601,2206,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/2p_IJctIFETtjciHxNOpN.jpeg,19.0,Avelina Hadji-Kyriacou,Avelina,578538079209654,"[{'type': 'text', 'value': 'Found out my ECCV paper is getting rejected because of a LaTeX compile error :(', 'raw': 'Found out my ECCV paper is getting rejected because of a LaTeX compile error :('}]",Found out my ECCV paper is getting rejected because of a LaTeX compile error :(,[],[],"[{'reaction': '😔', 'users': ['osanseviero', 'Gatozu35', 'BoredApeYachtClub', 'monsoon-nlp', 'taufiqdp', 'sikang99', 'm18coppola', 'pcuenq'], 'count': 8}, {'reaction': '❤️', 'users': ['9voltfan2009', 'm18coppola'], 'count': 2}]",2024-05-10 10:40:50,2024-05-10 10:40:50.535,[],/posts/Avelina/578538079209654,1228,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62710bd57b9f120adb36e451/xv02RE8VgayDPDE6jkwV2.png,39.0,Mateusz Dziemian,mattmdjaga,150370371340531,"[{'type': 'text', 'value': 'NEW HF 🤗 COURSE to help people dive into Computer Vision built by the HF community. Over the last 6 months the hugging face discord community has been hard at work developing a new computer vision course. Receive a Certificate of completion and share it on your socials 🤗.', 'raw': 'NEW HF 🤗 COURSE to help people dive into Computer Vision built by the HF community. Over the last 6 months the hugging face discord community has been hard at work developing a new computer vision course. Receive a Certificate of completion and share it on your socials 🤗.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/learn/computer-vision-course/unit0/welcome/welcome', 'raw': 'https://huggingface.co/learn/computer-vision-course/unit0/welcome/welcome'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","NEW HF 🤗 COURSE to help people dive into Computer Vision built by the HF community. Over the last 6 months the hugging face discord community has been hard at work developing a new computer vision course. Receive a Certificate of completion and share it on your socials 🤗. 
+ +https://huggingface.co/learn/computer-vision-course/unit0/welcome/welcome + + +",[],[],"[{'reaction': '🚀', 'users': ['mattmdjaga', 'osanseviero', 'not-lain', 'Taylor658', 'mmhamdy'], 'count': 5}, {'reaction': '🔥', 'users': ['mmhamdy'], 'count': 1}]",2024-05-10 10:22:33,2024-05-10 17:22:54.398,"[{'_id': '641b754d1911d3be6745cce9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/641b754d1911d3be6745cce9/DxjZG1XT4H3ZHF7qHxWxk.jpeg', 'fullname': 'atayloraerospace', 'name': 'Taylor658', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 112, 'isFollowing': False}]",/posts/mattmdjaga/150370371340531,1817,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,679817419734518,"[{'type': 'text', 'value': 'Chemllm.org Now transfered to ChemLLM-20B-DPO, Have a try now!🤗', 'raw': 'Chemllm.org Now transfered to ChemLLM-20B-DPO, Have a try now!🤗'}]","Chemllm.org Now transfered to ChemLLM-20B-DPO, Have a try now!🤗",[],[],"[{'reaction': '🔥', 'users': ['pedi', 'ShahabVFX', 'dillfrescott', 'rbiswasfc'], 'count': 4}, {'reaction': '🚀', 'users': ['pedi', 'deamyasin', 'dillfrescott'], 'count': 3}]",2024-05-05 16:34:42,2024-05-05 16:34:42.123,[],/posts/di-zhang-fdu/679817419734518,2015,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,833340983098067,"[{'type': 'text', 'value': ""mlx_micrograd - mlx port of Karpathy's micrograd- a tiny scalar-valued autograd engine with a small PyTorch-like neural network library on top."", 'raw': ""mlx_micrograd - mlx port of Karpathy's micrograd- a tiny scalar-valued autograd engine with a small PyTorch-like neural network library on top.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/Jaykef/mlx_micrograd', 'raw': 'https://github.com/Jaykef/mlx_micrograd'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Installation', 'raw': 'Installation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'bash', 'code': 'pip install mlx_micrograd', 'raw': '```bash\npip install mlx_micrograd\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Example usage', 'raw': 'Example usage'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Example showing a number of possible supported operations:', 'raw': 'Example showing a number of possible supported operations:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'lang': 'python', 'code': ""from mlx_micrograd.engine import Value\n\na = Value(-4.0)\nb = Value(2.0)\nc = a + b\nd = a * b + b**3\nc += c + 1\nc += 1 + c + (-a)\nd += d * 2 + (b + a).relu()\nd += 3 * d + (b - a).relu()\ne = c - d\nf = e**2\ng = f / 2.0\ng += 10.0 / f\nprint(f'{g.data}') # prints array(24.7041, dtype=float32), the outcome of this forward pass\ng.backward()\nprint(f'{a.grad}') # prints array(138.834, dtype=float32), i.e. the numerical value of dg/da\nprint(f'{b.grad}') # prints array(645.577, dtype=float32), i.e. 
the numerical value of dg/db"", 'raw': ""```python\nfrom mlx_micrograd.engine import Value\n\na = Value(-4.0)\nb = Value(2.0)\nc = a + b\nd = a * b + b**3\nc += c + 1\nc += 1 + c + (-a)\nd += d * 2 + (b + a).relu()\nd += 3 * d + (b - a).relu()\ne = c - d\nf = e**2\ng = f / 2.0\ng += 10.0 / f\nprint(f'{g.data}') # prints array(24.7041, dtype=float32), the outcome of this forward pass\ng.backward()\nprint(f'{a.grad}') # prints array(138.834, dtype=float32), i.e. the numerical value of dg/da\nprint(f'{b.grad}') # prints array(645.577, dtype=float32), i.e. the numerical value of dg/db\n```""}, {'type': 'new_line', 'raw': '\n'}]","mlx_micrograd - mlx port of Karpathy's micrograd- a tiny scalar-valued autograd engine with a small PyTorch-like neural network library on top. + +https://github.com/Jaykef/mlx_micrograd +Installation +```bash +pip install mlx_micrograd +``` +Example usage +Example showing a number of possible supported operations: +```python +from mlx_micrograd.engine import Value + +a = Value(-4.0) +b = Value(2.0) +c = a + b +d = a * b + b**3 +c += c + 1 +c += 1 + c + (-a) +d += d * 2 + (b + a).relu() +d += 3 * d + (b - a).relu() +e = c - d +f = e**2 +g = f / 2.0 +g += 10.0 / f +print(f'{g.data}') # prints array(24.7041, dtype=float32), the outcome of this forward pass +g.backward() +print(f'{a.grad}') # prints array(138.834, dtype=float32), i.e. the numerical value of dg/da +print(f'{b.grad}') # prints array(645.577, dtype=float32), i.e. the numerical value of dg/db +``` +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/9geq5sD5zYWsGyrMINfkk.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/UQDo2hPR-kh7dUTy6pCFk.png'}]",[],"[{'reaction': '👍', 'users': ['akashicmarga', 'ShahabVFX', 'ahmedzx'], 'count': 3}]",2024-05-05 16:09:11,2024-05-05 16:50:28.798,[],/posts/Jaward/833340983098067,1800,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64740cf7485a7c8e1bd51ac9/qxcEOKY4M36ggryaoc4L-.jpeg,226.0,Beckett Dillon,Severian,618185008303784,"[{'type': 'text', 'value': 'Create and Train Your Own Expert LLM: Generating Synthetic, Fact-Based Datasets with LMStudio/Ollama and then fine-tuning with MLX and Unsloth', 'raw': 'Create and Train Your Own Expert LLM: Generating Synthetic, Fact-Based Datasets with LMStudio/Ollama and then fine-tuning with MLX and Unsloth'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hey everyone!', 'raw': 'Hey everyone!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I know there are tons of videos and tutorials out there already but I've noticed a lot of questions popping up in community posts about using synthetic datasets for creative projects and how to transform personal content into more factual material. In my own work doing enterprise-level SFT and crafting my open-source models, I've enhanced a Python framework originally shared by the creator of the Tess models. This improved stack utilizes local language models and also integrates the Wikipedia dataset to ensure that the content generated is as accurate and reliable as possible."", 'raw': ""I know there are tons of videos and tutorials out there already but I've noticed a lot of questions popping up in community posts about using synthetic datasets for creative projects and how to transform personal content into more factual material. 
In my own work doing enterprise-level SFT and crafting my open-source models, I've enhanced a Python framework originally shared by the creator of the Tess models. This improved stack utilizes local language models and also integrates the Wikipedia dataset to ensure that the content generated is as accurate and reliable as possible.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've been thinking of putting together a comprehensive, step-by-step course/guide on creating your own Expert Language Model. From dataset preparation and training to deployment on Hugging Face and even using something like AnythingLLM for user interaction. I'll walk you through each phase, clarifying complex concepts and troubleshooting common pitfalls."", 'raw': ""I've been thinking of putting together a comprehensive, step-by-step course/guide on creating your own Expert Language Model. From dataset preparation and training to deployment on Hugging Face and even using something like AnythingLLM for user interaction. I'll walk you through each phase, clarifying complex concepts and troubleshooting common pitfalls.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Let me know if this interests you!', 'raw': 'Let me know if this interests you!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Most of the datasets and models I've made have been using these scripts and my approach"", 'raw': ""Most of the datasets and models I've made have been using these scripts and my approach""}]","Create and Train Your Own Expert LLM: Generating Synthetic, Fact-Based Datasets with LMStudio/Ollama and then fine-tuning with MLX and Unsloth + +Hey everyone! + +I know there are tons of videos and tutorials out there already but I've noticed a lot of questions popping up in community posts about using synthetic datasets for creative projects and how to transform personal content into more factual material. In my own work doing enterprise-level SFT and crafting my open-source models, I've enhanced a Python framework originally shared by the creator of the Tess models. This improved stack utilizes local language models and also integrates the Wikipedia dataset to ensure that the content generated is as accurate and reliable as possible. + +I've been thinking of putting together a comprehensive, step-by-step course/guide on creating your own Expert Language Model. From dataset preparation and training to deployment on Hugging Face and even using something like AnythingLLM for user interaction. I'll walk you through each phase, clarifying complex concepts and troubleshooting common pitfalls. + +Let me know if this interests you! 
+ +Most of the datasets and models I've made have been using these scripts and my approach",[],[],"[{'reaction': '👍', 'users': ['edumunozsala', 'SmartGoldfish', 'rizwanalvi1', 'HeRksTAn', 'algorithm', 'aimillsapps', 'KingNish', 'Dlbk', 'win10', 'Stefan171', 'shivamgpt', 'Ayeleth', 'iweavings', 'Clausss', 'kid502', 'Deepankar27', 'krishnamishra94', 'Taylor658', 'victor', 'Nioi', 'ajibawa-2023', 'nivram189', 'pedi', 'ShahabVFX', 'ahmedzx', 'AtAndDev', 'alt2023', 'schlafen'], 'count': 28}, {'reaction': '❤️', 'users': ['afkfatih', 'algorithm', 'MultiTrickFox', 'raincandy-u', 'victor', 'nivram189', 'pedi', 'LeroyDyer', 'ahmedzx', 'AtAndDev', 'GaelicThunder'], 'count': 11}, {'reaction': '🚀', 'users': ['HeRksTAn', 'algorithm', 'victor', 'nivram189', 'pedi', 'AtAndDev', 'Norod78'], 'count': 7}, {'reaction': '🔥', 'users': ['algorithm', 'victor', 'nivram189', 'pedi', 'AtAndDev'], 'count': 5}, {'reaction': '😎', 'users': ['algorithm', 'victor', 'nivram189', 'AtAndDev'], 'count': 4}]",2024-05-04 15:25:22,2024-05-08 04:17:19.518,"[{'_id': '642d678777078db98b729188', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/642d678777078db98b729188/lYhIEChF4qQG8ltRF3ECw.png', 'fullname': 'algorithm', 'name': 'algorithm', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '62ec82aaaefff8bcb1336b87', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62ec82aaaefff8bcb1336b87/1_-oJrfuFTSiHnYv1eBPr.jpeg', 'fullname': 'Malich Coory', 'name': 'IndrasMirror', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '640720285e6d06cc2cf346e4', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/640720285e6d06cc2cf346e4/p-U1ON7ZPNntBAnOo8FCE.png', 'fullname': 'ZINWIN(Zuojun-Ye)', 'name': 'win10', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 49, 'isFollowing': False}, {'_id': '65d883893a52cd9bcd8ab7cf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65d883893a52cd9bcd8ab7cf/tRsCJlHNZo1D02kBTmfy9.jpeg', 'fullname': 'leroy Samuel Dyer', 'name': 'LeroyDyer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 107, 'isFollowing': False}, {'_id': '646d811eeb9268aeebc4bfeb', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646d811eeb9268aeebc4bfeb/lbGHopd87wkB-TL1rCZHR.jpeg', 'fullname': 'iweavings', 'name': 'iweavings', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '657eb5b256c9c67605a6e8b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/657eb5b256c9c67605a6e8b5/RPblnGJX57oiIcASEz_S8.png', 'fullname': 'raincandy_U', 'name': 'raincandy-u', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 37, 'isFollowing': False}, {'_id': '64e9d0e364619253012c8e64', 'avatarUrl': '/avatars/dbd21f8d0624422f232aa0d788e6838b.svg', 'fullname': 'yy', 'name': 'capti0n', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '66390a72c9648d0612488340', 'avatarUrl': '/avatars/09d248a6f220819763c810a73a324e8c.svg', 'fullname': 'rshee', 'name': 'pedi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': 
False}, {'_id': '663921b95cbebc0a7755244d', 'avatarUrl': '/avatars/5a1845b56a7489e029b200d92d7958be.svg', 'fullname': 'Ahmed Moawad ', 'name': 'ahmedzx', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}]",/posts/Severian/618185008303784,3807,,16 +https://cdn-avatars.huggingface.co/v1/production/uploads/65eb213f00f1a613daafd462/uZuhXpD2B2aadwQxoL7DK.jpeg,57.0,Phenix Rhyder,phenixrhyder,870174544714806,"[{'type': 'text', 'value': 'Midjourney Ai', 'raw': 'Midjourney Ai'}]",Midjourney Ai,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/X0KponEOz7eS_5OlAIRi6.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/eOxHkKLmZg342g2gpYP5E.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/u830LW6ybGbZF9ux3NdNj.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/sk88xoVsxA3LxSEDbQ8E1.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/uikhDPksrS9PAh2m10KcT.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/PNXjOu_09ZOgDvLe1gXEq.png'}]",[],"[{'reaction': '🔥', 'users': ['Rohitkhatri75436', 'ShahabVFX'], 'count': 2}, {'reaction': '👍', 'users': ['ssml2050'], 'count': 1}]",2024-05-04 12:53:09,2024-05-10 11:59:19.110,"[{'_id': '62f6d8f5c3372328414e0dfc', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1660344552924-noauth.png', 'fullname': 'Ilya Shigabeev', 'name': 'frappuccino', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '65eb213f00f1a613daafd462', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65eb213f00f1a613daafd462/uZuhXpD2B2aadwQxoL7DK.jpeg', 'fullname': 'Phenix Rhyder', 'name': 'phenixrhyder', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 57, 'isFollowing': False}]",/posts/phenixrhyder/870174544714806,3208,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/63dc683562dc193e6d45ceb3/hEjuQkt6RKxl-sUBeDxRp.png,16.0,Ayush Thakur,ayush-thakur02,795134066112215,"[{'type': 'text', 'value': 'Enhancing Distributed Systems with Self-Healing Nodes and Adaptive Data Sharding', 'raw': 'Enhancing Distributed Systems with Self-Healing Nodes and Adaptive Data Sharding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2405.00004'}, 'url': 'https://huggingface.co/papers/2405.00004', 'raw': 'https://huggingface.co/papers/2405.00004', 'label': 'Self-healing Nodes with Adaptive Data-Sharding (2405.00004)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The paper introduces an innovative approach to improve distributed systems by integrating self-healing nodes with adaptive data sharding. 
This method leverages advanced concepts like self-replication, fractal regeneration, and predictive sharding to enhance scalability, performance, fault tolerance, and adaptability.', 'raw': 'The paper introduces an innovative approach to improve distributed systems by integrating self-healing nodes with adaptive data sharding. This method leverages advanced concepts like self-replication, fractal regeneration, and predictive sharding to enhance scalability, performance, fault tolerance, and adaptability.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Concepts:', 'raw': 'Key Concepts:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Self-Replication: Nodes can create copies of themselves or their data to aid in recovery and load balancing.', 'raw': '- Self-Replication: Nodes can create copies of themselves or their data to aid in recovery and load balancing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Fractal Regeneration: Nodes can reconfigure and restore their functionality after partial damage, inspired by natural fractals.', 'raw': '- Fractal Regeneration: Nodes can reconfigure and restore their functionality after partial damage, inspired by natural fractals.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Predictive Sharding: Nodes can anticipate future data trends and proactively adjust data distribution to optimize performance.', 'raw': '- Predictive Sharding: Nodes can anticipate future data trends and proactively adjust data distribution to optimize performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Methodology:', 'raw': 'Methodology:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The approach consists of four main steps:', 'raw': 'The approach consists of four main steps:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- Temporal data sharding based on data's temporal characteristics."", 'raw': ""- Temporal data sharding based on data's temporal characteristics.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Self-replicating nodes to enhance data availability and reliability.', 'raw': '- Self-replicating nodes to enhance data availability and reliability.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Fractal regeneration for robust recovery mechanisms.', 'raw': '- Fractal regeneration for robust recovery mechanisms.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Predictive sharding using consistent hashing to anticipate and adapt to future data trends.', 'raw': '- Predictive sharding using consistent hashing to anticipate and adapt to future data trends.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Results and Analysis:', 'raw': 'Results and Analysis:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Experimental evaluations show that this approach outperforms existing data sharding techniques in scalability, performance, fault tolerance, and adaptability. The use of synthetic data and workload generators created realistic scenarios for testing.', 'raw': 'Experimental evaluations show that this approach outperforms existing data sharding techniques in scalability, performance, fault tolerance, and adaptability. 
The use of synthetic data and workload generators created realistic scenarios for testing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Applications:', 'raw': 'Applications:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The methodology can be applied to various domains such as distributed database systems, blockchain networks, IoT, and cloud computing, offering improvements in data distribution efficiency and system resilience.', 'raw': 'The methodology can be applied to various domains such as distributed database systems, blockchain networks, IoT, and cloud computing, offering improvements in data distribution efficiency and system resilience.'}]","Enhancing Distributed Systems with Self-Healing Nodes and Adaptive Data Sharding + +Paper: https://huggingface.co/papers/2405.00004 + +The paper introduces an innovative approach to improve distributed systems by integrating self-healing nodes with adaptive data sharding. This method leverages advanced concepts like self-replication, fractal regeneration, and predictive sharding to enhance scalability, performance, fault tolerance, and adaptability. + +Key Concepts: +- Self-Replication: Nodes can create copies of themselves or their data to aid in recovery and load balancing. +- Fractal Regeneration: Nodes can reconfigure and restore their functionality after partial damage, inspired by natural fractals. +- Predictive Sharding: Nodes can anticipate future data trends and proactively adjust data distribution to optimize performance. + +Methodology: +The approach consists of four main steps: +- Temporal data sharding based on data's temporal characteristics. +- Self-replicating nodes to enhance data availability and reliability. +- Fractal regeneration for robust recovery mechanisms. +- Predictive sharding using consistent hashing to anticipate and adapt to future data trends. + +Results and Analysis: +Experimental evaluations show that this approach outperforms existing data sharding techniques in scalability, performance, fault tolerance, and adaptability. The use of synthetic data and workload generators created realistic scenarios for testing. 
+ +Applications: +The methodology can be applied to various domains such as distributed database systems, blockchain networks, IoT, and cloud computing, offering improvements in data distribution efficiency and system resilience.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/qmmIsq2TFPG37NmerYlyg.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/ccoMoUKmE46mk-YSfLkgg.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/ZpelR9aLP8FbTwmmA0YcW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63dc683562dc193e6d45ceb3/7d7G1l_SLhd3h8AXDxyY8.png'}]",[],"[{'reaction': '👍', 'users': ['ssml2050', 'ayush-thakur02', 'ShahabVFX', 'samusenps', 'hiyouga'], 'count': 5}, {'reaction': '👀', 'users': ['victor'], 'count': 1}, {'reaction': '❤️', 'users': ['samusenps'], 'count': 1}, {'reaction': '🔥', 'users': ['samusenps'], 'count': 1}]",2024-05-04 07:18:49,2024-05-04 07:18:49.848,[],/posts/ayush-thakur02/795134066112215,2958,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,968496263925772,"[{'type': 'text', 'value': '# Thoughts on Neural Scaling Laws', 'raw': '# Thoughts on Neural Scaling Laws'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'When you take a zoomed-out perspective view on the success goals of neural networks, you see they all revolve around the Scaling Laws - empirical observations that performance improves with increased model size, dataset, and compute resources.', 'raw': 'When you take a zoomed-out perspective view on the success goals of neural networks, you see they all revolve around the Scaling Laws - empirical observations that performance improves with increased model size, dataset, and compute resources.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The specifics of how these laws apply, vary for different modalities and architectures. This is notable in the empirical equations used to measure these laws. ', 'raw': 'The specifics of how these laws apply, vary for different modalities and architectures. This is notable in the empirical equations used to measure these laws. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Yet they all heavily rely on three main factors - Data, Size and Computation. These factors themselves also have sub-dependencies - data size & quality, model size & architecture, num of GPUs & code for compute kernels respectively.', 'raw': 'Yet they all heavily rely on three main factors - Data, Size and Computation. These factors themselves also have sub-dependencies - data size & quality, model size & architecture, num of GPUs & code for compute kernels respectively.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As research in these laws progresses, we begin to see new scaling laws emerge that may apply in much different ways than usual. This is typical in recent local LLMs (Phi-3, Gemma 2B, LLMs in a flash) which shows small sized models with small rich quality data beating large models', 'raw': 'As research in these laws progresses, we begin to see new scaling laws emerge that may apply in much different ways than usual. 
This is typical in recent local LLMs (Phi-3, Gemma 2B, LLMs in a flash) which shows small sized models with small rich quality data beating large models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I look forward to the singularity moment - when these laws take a full round spin and meet at where it all began:)', 'raw': 'I look forward to the singularity moment - when these laws take a full round spin and meet at where it all began:)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'References:', 'raw': 'References:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Scaling Laws for Neural Language Models: ', 'raw': '- Scaling Laws for Neural Language Models: '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2001.08361', 'raw': 'https://arxiv.org/pdf/2001.08361'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Scaling Laws for Autoregressive Generative Modeling: ', 'raw': '- Scaling Laws for Autoregressive Generative Modeling: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2010.14701', 'raw': 'https://arxiv.org/abs/2010.14701'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- LLMs in a flash: ', 'raw': '- LLMs in a flash: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2312.11514', 'raw': 'https://arxiv.org/abs/2312.11514'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Phi-3 Technical Report: ', 'raw': '- Phi-3 Technical Report: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2404.14219', 'raw': 'https://arxiv.org/abs/2404.14219'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Gemma 2B: ', 'raw': '- Gemma 2B: '}, {'type': 'link', 'href': 'https://arxiv.org/pdf/2403.08295', 'raw': 'https://arxiv.org/pdf/2403.08295'}]","# Thoughts on Neural Scaling Laws +When you take a zoomed-out perspective view on the success goals of neural networks, you see they all revolve around the Scaling Laws - empirical observations that performance improves with increased model size, dataset, and compute resources. + +The specifics of how these laws apply, vary for different modalities and architectures. This is notable in the empirical equations used to measure these laws. + +Yet they all heavily rely on three main factors - Data, Size and Computation. These factors themselves also have sub-dependencies - data size & quality, model size & architecture, num of GPUs & code for compute kernels respectively. + +As research in these laws progresses, we begin to see new scaling laws emerge that may apply in much different ways than usual. 
This is typical in recent local LLMs (Phi-3, Gemma 2B, LLMs in a flash) which shows small sized models with small rich quality data beating large models + +I look forward to the singularity moment - when these laws take a full round spin and meet at where it all began:) + +References: +- Scaling Laws for Neural Language Models: https://arxiv.org/pdf/2001.08361 +- Scaling Laws for Autoregressive Generative Modeling: https://arxiv.org/abs/2010.14701 +- LLMs in a flash: https://arxiv.org/abs/2312.11514 +- Phi-3 Technical Report: https://arxiv.org/abs/2404.14219 +- Gemma 2B: https://arxiv.org/pdf/2403.08295","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/2Dp8dua75lkx8Z8Adf6EW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/b_PjwPYIBmL2ZQPCUhHKk.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/zIdvNDx44FQVyhI0g7K1-.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/pw_HjW_LHiVIeXN9PZofU.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/3wKsQPJyTMmZ4M_o_oQpW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/m_yPXI_RMwxGyIo7vWj_2.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/Ua-dF5X0D0xEYyewtKeAa.png'}]",[],"[{'reaction': '👍', 'users': ['ssml2050', 'ShahabVFX', 'adorkin'], 'count': 3}]",2024-05-04 04:37:46,2024-05-05 01:11:48.948,[],/posts/Jaward/968496263925772,2456,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,914231688157819,"[{'type': 'text', 'value': 'The IDM-VTON (Improving Diffusion Models for Authentic Virtual Try-on in the Wild) is so powerful that it can even transfer beard or hair as well.', 'raw': 'The IDM-VTON (Improving Diffusion Models for Authentic Virtual Try-on in the Wild) is so powerful that it can even transfer beard or hair as well.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have prepared installer scripts and full tutorials for Windows (requires min 8 GB VRAM GPU), Massed Compute (I suggest this if you don’t have a strong GPU), RunPod and a free Kaggle account (works perfect as well but slow).', 'raw': 'I have prepared installer scripts and full tutorials for Windows (requires min 8 GB VRAM GPU), Massed Compute (I suggest this if you don’t have a strong GPU), RunPod and a free Kaggle account (works perfect as well but slow).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Windows Tutorial : ', 'raw': 'Windows Tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/m4pcIeAVQD0', 'raw': 'https://youtu.be/m4pcIeAVQD0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Cloud (Massed Compute, RunPod & Kaggle) Tutorial : ', 'raw': 'Cloud (Massed Compute, RunPod & Kaggle) Tutorial : '}, {'type': 'link', 'href': 'https://youtu.be/LeHfgq_lAXU', 'raw': 'https://youtu.be/LeHfgq_lAXU'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","The IDM-VTON (Improving Diffusion Models for Authentic Virtual Try-on in the Wild) is so powerful that it can even transfer beard or hair as 
well. + +I have prepared installer scripts and full tutorials for Windows (requires min 8 GB VRAM GPU), Massed Compute (I suggest this if you don’t have a strong GPU), RunPod and a free Kaggle account (works perfect as well but slow). + +Windows Tutorial : https://youtu.be/m4pcIeAVQD0 + +Cloud (Massed Compute, RunPod & Kaggle) Tutorial : https://youtu.be/LeHfgq_lAXU + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/6FAf1Qa3ScZOsDUViuqIx.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/8vn_YR30RoPaRssRTIHah.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/NvTqEBlqwCDzwvXI3qEDE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Rvz-KVu-UPPobFv2_twyO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/wn0XIlLuqojp_PXmYNaMY.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/fQPgfa3j5qj3F2AFvNVGk.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/q48hC5Ya925Bxc0NaD5kr.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Dfjky0p5YqSU7F2QNgJa7.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/1FM9nqcqfrohQiffAW0Qe.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Lq6qZUTbDm8NzTYGVnVSA.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/ymQ2IzhvG0rwaONN4cmVP.png'}]",[],"[{'reaction': '🚀', 'users': ['Jaward', 'ShahabVFX'], 'count': 2}, {'reaction': '👍', 'users': ['kevinpics'], 'count': 1}]",2024-05-03 23:00:42,2024-05-03 23:00:42.154,[],/posts/MonsterMMORPG/914231688157819,2766,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,175833406902405,"[{'type': 'text', 'value': 'ChemLLM-20B SFT and DPO is coming!🤗', 'raw': 'ChemLLM-20B SFT and DPO is coming!🤗'}]",ChemLLM-20B SFT and DPO is coming!🤗,[],[],"[{'reaction': '👍', 'users': ['ajibawa-2023', 'ssml2050', 'ShahabVFX', 'n4ze3m', 'dillfrescott'], 'count': 5}]",2024-05-03 19:56:12,2024-05-05 01:41:01.774,"[{'_id': '64bce15bafd1e46c5504ad38', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png', 'fullname': 'Di Zhang', 'name': 'di-zhang-fdu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 151, 'isFollowing': False}]",/posts/di-zhang-fdu/175833406902405,2391,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,762050501752293,"[{'type': 'text', 'value': 'A new dataset for anyone interested in Satellite imagery: 3 million ', 'raw': 'A new dataset for anyone interested in Satellite imagery: 3 million '}, {'type': 'mention', 'user': 'Satellogic', 'raw': '@Satellogic'}, {'type': 'text', 'value': ' images of unique locations — 6 million images, including location revisits — from around the world under a Creative Commons CC-BY 4.0 license.', 'raw': ' images of unique locations — 6 million images, 
including location revisits — from around the world under a Creative Commons CC-BY 4.0 license.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Interesting potential in journalism.', 'raw': 'Interesting potential in journalism.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'satellogic/EarthView'}, 'url': 'https://huggingface.co/datasets/satellogic/EarthView', 'raw': 'https://huggingface.co/datasets/satellogic/EarthView'}]","A new dataset for anyone interested in Satellite imagery: 3 million @Satellogic images of unique locations — 6 million images, including location revisits — from around the world under a Creative Commons CC-BY 4.0 license. + +Interesting potential in journalism. + +https://huggingface.co/datasets/satellogic/EarthView",[],[],"[{'reaction': '🚀', 'users': ['Taylor658', 'ajibawa-2023', 'Ndonda871', 'qwertcz', 'pedi', 'ShahabVFX'], 'count': 6}, {'reaction': '👍', 'users': ['ssml2050', 'Ndonda871', 'qwertcz'], 'count': 3}]",2024-05-03 17:01:58,2024-05-03 17:01:58.652,[],/posts/fdaudens/762050501752293,2480,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/65ff2f9fcc7a4f35567b9098/oilyj1kmz11ifVoCLrK-H.png,11.0,George Cameron,georgewritescode,101449295408409,"[{'type': 'text', 'value': 'Excited to bring our benchmarking leaderboard of >100 LLM API endpoints to HF! ', 'raw': 'Excited to bring our benchmarking leaderboard of >100 LLM API endpoints to HF! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Speed and price are often just as important as quality when building applications with LLMs. We bring together all the data you need to consider all three when you need to pick a model and API provider.', 'raw': 'Speed and price are often just as important as quality when building applications with LLMs. 
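A quick aside on the EarthView release above: for a first look without committing to the full download, a streaming sketch like the following may help (the "satellogic" config name is an assumption from the dataset card; swap in whichever subset you need):

```python
# Hedged sketch: stream a few EarthView samples instead of downloading
# everything up front; the config name below is assumed, not guaranteed.
from datasets import load_dataset

ds = load_dataset("satellogic/EarthView", "satellogic", split="train", streaming=True)
sample = next(iter(ds))
print(sample.keys())  # inspect the available imagery/metadata fields
```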
We bring together all the data you need to consider all three when you need to pick a model and API provider.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Coverage:', 'raw': 'Coverage:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Quality (Index of evals, MMLU, Chatbot Arena, HumanEval, MT-Bench)', 'raw': '‣ Quality (Index of evals, MMLU, Chatbot Arena, HumanEval, MT-Bench)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Throughput (tokens/s: median, P5, P25, P75, P95)', 'raw': '‣ Throughput (tokens/s: median, P5, P25, P75, P95)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Latency (TTFT: median, P5, P25, P75, P95)', 'raw': '‣ Latency (TTFT: median, P5, P25, P75, P95)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ Context window', 'raw': '‣ Context window'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '‣ OpenAI library compatibility', 'raw': '‣ OpenAI library compatibility'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to Space: ', 'raw': 'Link to Space: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ArtificialAnalysis/LLM-Performance-Leaderboard'}, 'url': 'https://huggingface.co/spaces/ArtificialAnalysis/LLM-Performance-Leaderboard', 'raw': 'https://huggingface.co/spaces/ArtificialAnalysis/LLM-Performance-Leaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog post: ', 'raw': 'Blog post: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/leaderboard-artificial-analysis', 'raw': 'https://huggingface.co/blog/leaderboard-artificial-analysis'}]","Excited to bring our benchmarking leaderboard of >100 LLM API endpoints to HF! + +Speed and price are often just as important as quality when building applications with LLMs. We bring together all the data you need to consider all three when you need to pick a model and API provider. 
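Before the Coverage list below: the throughput and latency entries report percentile summaries, which can be reproduced from raw benchmark samples with a generic sketch like this (an illustration, not the leaderboard's actual pipeline):

```python
# Generic percentile summary; `samples` would be tokens/s (throughput)
# or seconds-to-first-token (TTFT) measurements from repeated runs.
import numpy as np

def summarize(samples):
    m = np.asarray(samples, dtype=float)
    return {name: float(np.percentile(m, q)) for name, q in
            [("median", 50), ("P5", 5), ("P25", 25), ("P75", 75), ("P95", 95)]}

print(summarize([102.3, 98.7, 110.4, 95.1, 104.8]))  # toy tokens/s samples
```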
+ +Coverage: +‣ Quality (Index of evals, MMLU, Chatbot Arena, HumanEval, MT-Bench) +‣ Throughput (tokens/s: median, P5, P25, P75, P95) +‣ Latency (TTFT: median, P5, P25, P75, P95) +‣ Context window +‣ OpenAI library compatibility + +Link to Space: https://huggingface.co/spaces/ArtificialAnalysis/LLM-Performance-Leaderboard + +Blog post: https://huggingface.co/blog/leaderboard-artificial-analysis","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65ff2f9fcc7a4f35567b9098/dJTB2QfvXP6wlqA7tneQm.png'}]",[],"[{'reaction': '🔥', 'users': ['jeffboudier', 'victor', 'Kukedlc', 'clefourrier', 'Ijithad', 'ajibawa-2023', 'ShahabVFX', 'not-lain'], 'count': 8}, {'reaction': '❤️', 'users': ['clefourrier', 'PineappleParadiseDreams', 'not-lain'], 'count': 3}]",2024-05-03 16:40:46,2024-05-03 16:41:19.903,[],/posts/georgewritescode/101449295408409,2363,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg,638.0,Daniel van Strien,davanstrien,884026993565149,"[{'type': 'text', 'value': 'As part of the Data is Better Together MPEP project, we are now at the point where some translation efforts have successfully translated 500 highly ranked prompts into a new target language (amazing work from ', 'raw': 'As part of the Data is Better Together MPEP project, we are now at the point where some translation efforts have successfully translated 500 highly ranked prompts into a new target language (amazing work from '}, {'type': 'mention', 'user': 'Rijgersberg', 'raw': '@Rijgersberg'}, {'type': 'text', 'value': ' et al!)', 'raw': ' et al!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our next step is to use these translated prompts to evaluate the performance of LLMs for non English languages. ', 'raw': 'Our next step is to use these translated prompts to evaluate the performance of LLMs for non English languages. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Does LLM, as a judge, work outside of English?', 'raw': 'Does LLM, as a judge, work outside of English?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Ideally, it would be compelling to leverage LLMs to judge models for non-English since this significantly lowers the barrier to evaluating models (although it doesn't remove this barrier altogether). "", 'raw': ""Ideally, it would be compelling to leverage LLMs to judge models for non-English since this significantly lowers the barrier to evaluating models (although it doesn't remove this barrier altogether). ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What we want to know is:', 'raw': 'What we want to know is:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- does auto/LLM eval work in general for a particular language', 'raw': '- does auto/LLM eval work in general for a particular language'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- which model(s) works best as a judge', 'raw': '- which model(s) works best as a judge'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""- do LLMs' judgments of non-English models match human preferences? "", 'raw': ""- do LLMs' judgments of non-English models match human preferences? 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We're starting to think about how to approach this. If you have any ideas of possible approaches feel free to comment or join the discussion here: "", 'raw': ""We're starting to think about how to approach this. If you have any ideas of possible approaches feel free to comment or join the discussion here: ""}, {'type': 'link', 'href': 'https://github.com/huggingface/data-is-better-together/issues/61', 'raw': 'https://github.com/huggingface/data-is-better-together/issues/61'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Other ideas...', 'raw': 'Other ideas...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Could an approach like ', 'raw': 'Could an approach like '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.18796'}, 'url': 'https://huggingface.co/papers/2404.18796', 'raw': 'https://huggingface.co/papers/2404.18796', 'label': 'Replacing Judges with Juries: Evaluating LLM Generations with a Panel of\n Diverse Models (2404.18796)'}, {'type': 'text', 'value': ' with the SOA models for a particular language work? i.e., choose 4 of the best open LLMs for Arabic and use those at the pool of raters rather than relying on one powerful judge LLM? ', 'raw': ' with the SOA models for a particular language work? i.e., choose 4 of the best open LLMs for Arabic and use those at the pool of raters rather than relying on one powerful judge LLM? '}]","As part of the Data is Better Together MPEP project, we are now at the point where some translation efforts have successfully translated 500 highly ranked prompts into a new target language (amazing work from @Rijgersberg et al!) + +Our next step is to use these translated prompts to evaluate the performance of LLMs for non English languages. + +Does LLM, as a judge, work outside of English? + +Ideally, it would be compelling to leverage LLMs to judge models for non-English since this significantly lowers the barrier to evaluating models (although it doesn't remove this barrier altogether). + +What we want to know is: +- does auto/LLM eval work in general for a particular language +- which model(s) works best as a judge +- do LLMs' judgments of non-English models match human preferences? + +We're starting to think about how to approach this. If you have any ideas of possible approaches feel free to comment or join the discussion here: https://github.com/huggingface/data-is-better-together/issues/61 + +Other ideas... + +Could an approach like https://huggingface.co/papers/2404.18796 with the SOA models for a particular language work? i.e., choose 4 of the best open LLMs for Arabic and use those at the pool of raters rather than relying on one powerful judge LLM? 
",[],"[{'_id': '6319b164bc8f3b313f7a1db0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6319b164bc8f3b313f7a1db0/Hh0kuwsAnD2AOKdL6PpRs.png', 'fullname': 'Edwin Rijgersberg', 'name': 'Rijgersberg', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 55}]","[{'reaction': '🤗', 'users': ['NePe', 'kristaller486', 'mmhamdy', 'Rijgersberg', 'alvarobartt'], 'count': 5}, {'reaction': '🔥', 'users': ['NePe', 'mmhamdy'], 'count': 2}]",2024-04-30 15:56:01,2024-04-30 15:56:01.259,[],/posts/davanstrien/884026993565149,1674,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,208120266952026,"[{'type': 'text', 'value': 'It’s exciting to see Apple’s commitment to opensource AI research lately. From a new awesome machine learning framework (mlx) to a family of purely open models (openELM) and incredibly visionary papers (LLMs in a flash, MM1) not mention the vibrant OSS community behind mlx - All alpha signs of something huge dropping in this year’s #AppleEvent & #WWDC', 'raw': 'It’s exciting to see Apple’s commitment to opensource AI research lately. From a new awesome machine learning framework (mlx) to a family of purely open models (openELM) and incredibly visionary papers (LLMs in a flash, MM1) not mention the vibrant OSS community behind mlx - All alpha signs of something huge dropping in this year’s #AppleEvent & #WWDC'}]","It’s exciting to see Apple’s commitment to opensource AI research lately. From a new awesome machine learning framework (mlx) to a family of purely open models (openELM) and incredibly visionary papers (LLMs in a flash, MM1) not mention the vibrant OSS community behind mlx - All alpha signs of something huge dropping in this year’s #AppleEvent & #WWDC","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/bklPxs9FQD-EZnPs5u1mX.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/08Y9CWM9uZhFZsS_xkCsb.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/tV2DrQ0t7uBdHH_CpoYxB.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/dJzoPbNix8uGRYWUsPuoS.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['victor', 'onandon', 'adorkin', 'julien-c', 'alielfilali01', 'luancloud'], 'count': 6}]",2024-04-30 14:12:05,2024-04-30 14:12:05.869,[],/posts/Jaward/208120266952026,1751,,0 +/avatars/5fe356d58c4c822a60370dbee8d78a69.svg,27.0,renyuxi,renyuxi,441311823846269,"[{'type': 'text', 'value': 'HyperSD released the 8-steps CFG-preserved LoRA just now, may be the first acceleration plugin that preserves the original CFG for both SDXL and SD15, you can adjust your negative prompts now!!!', 'raw': 'HyperSD released the 8-steps CFG-preserved LoRA just now, may be the first acceleration plugin that preserves the original CFG for both SDXL and SD15, you can adjust your negative prompts now!!!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hyper-SDXL-8steps-CFG-LoRA: ', 'raw': 'Hyper-SDXL-8steps-CFG-LoRA: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ByteDance/Hyper-SD'}, 'url': 'https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SDXL-8steps-CFG-lora.safetensors', 'raw': 
'https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SDXL-8steps-CFG-lora.safetensors'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Hyper-SD15-8steps-CFG-LoRA: ', 'raw': 'Hyper-SD15-8steps-CFG-LoRA: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ByteDance/Hyper-SD'}, 'url': 'https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SD15-8steps-CFG-lora.safetensors', 'raw': 'https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SD15-8steps-CFG-lora.safetensors'}]","HyperSD released the 8-steps CFG-preserved LoRA just now, may be the first acceleration plugin that preserves the original CFG for both SDXL and SD15, you can adjust your negative prompts now!!! +Hyper-SDXL-8steps-CFG-LoRA: https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SDXL-8steps-CFG-lora.safetensors +Hyper-SD15-8steps-CFG-LoRA: https://huggingface.co/ByteDance/Hyper-SD/blob/main/Hyper-SD15-8steps-CFG-lora.safetensors",[],[],"[{'reaction': '🔥', 'users': ['YaTharThShaRma999', 'victor', 'radames', 'OzzyGT', 'multimodalart'], 'count': 5}]",2024-04-30 13:49:43,2024-04-30 17:47:14.850,"[{'_id': '6064e095abd8d3692e3e2ed6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1648966381588-6064e095abd8d3692e3e2ed6.jpeg', 'fullname': 'Radamés Ajna', 'name': 'radames', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2555, 'isFollowing': False}]",/posts/renyuxi/441311823846269,1874,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1617264212503-603d25b75f9d390ab190b777.jpeg,1132.0,Pedro Cuenca,pcuenq,949530140359936,"[{'type': 'text', 'value': 'OpenELM in Core ML', 'raw': 'OpenELM in Core ML'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Apple recently released a set of efficient LLMs in sizes varying between 270M and 3B parameters. Their quality, according to benchmarks, is similar to OLMo models of comparable size, but they required half the pre-training tokens because they use layer-wise scaling, where the number of attention heads increases in deeper layers.', 'raw': 'Apple recently released a set of efficient LLMs in sizes varying between 270M and 3B parameters. Their quality, according to benchmarks, is similar to OLMo models of comparable size, but they required half the pre-training tokens because they use layer-wise scaling, where the number of attention heads increases in deeper layers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I converted these models to Core ML, for use on Apple Silicon, using this script: ', 'raw': 'I converted these models to Core ML, for use on Apple Silicon, using this script: '}, {'type': 'link', 'href': 'https://gist.github.com/pcuenca/23cd08443460bc90854e2a6f0f575084', 'raw': 'https://gist.github.com/pcuenca/23cd08443460bc90854e2a6f0f575084'}, {'type': 'text', 'value': '. The converted models were uploaded to this community in the Hub for anyone that wants to integrate inside their apps: ', 'raw': '. 
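Jumping back to the Hyper-SD LoRAs linked above, a minimal diffusers sketch for the 8-step CFG-preserving SDXL variant (the guidance scale is an assumption; check the Hyper-SD model card for the recommended range):

```python
# Sketch: load the CFG-preserving Hyper-SDXL LoRA and sample in 8 steps
# while keeping a real negative prompt. Assumes a recent diffusers release.
import torch
from diffusers import StableDiffusionXLPipeline

pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
).to("cuda")
pipe.load_lora_weights(
    "ByteDance/Hyper-SD", weight_name="Hyper-SDXL-8steps-CFG-lora.safetensors"
)
image = pipe(
    "a cinematic photo of a lighthouse at dusk",
    negative_prompt="blurry, low quality",  # adjustable again with this LoRA
    num_inference_steps=8,
    guidance_scale=5.0,  # assumed value; consult the model card
).images[0]
image.save("hyper_sdxl_8step.png")
```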
The converted models were uploaded to this community in the Hub for anyone that wants to integrate inside their apps: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194'}, 'url': 'https://huggingface.co/collections/corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194', 'raw': 'https://huggingface.co/collections/corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The conversion was done with the following parameters:', 'raw': 'The conversion was done with the following parameters:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Precision: float32.', 'raw': '- Precision: float32.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Sequence length: fixed to 128.', 'raw': '- Sequence length: fixed to 128.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With swift-transformers (', 'raw': 'With swift-transformers ('}, {'type': 'link', 'href': 'https://github.com/huggingface/swift-transformers', 'raw': 'https://github.com/huggingface/swift-transformers'}, {'type': 'text', 'value': ""), I'm getting about 56 tok/s with the 270M on my M1 Max, and 6.5 with the largest 3B model. These speeds could be improved by converting to "", 'raw': ""), I'm getting about 56 tok/s with the 270M on my M1 Max, and 6.5 with the largest 3B model. These speeds could be improved by converting to ""}, {'type': 'inline_code', 'code': 'float16', 'raw': '`float16`'}, {'type': 'text', 'value': "". However, there's some precision loss somewhere and generation doesn't work in "", 'raw': "". However, there's some precision loss somewhere and generation doesn't work in ""}, {'type': 'inline_code', 'code': 'float16', 'raw': '`float16`'}, {'type': 'text', 'value': "" mode yet. I'm looking into this and will keep you posted! Or take a look at this issue if you'd like to help: "", 'raw': "" mode yet. I'm looking into this and will keep you posted! Or take a look at this issue if you'd like to help: ""}, {'type': 'link', 'href': 'https://github.com/huggingface/swift-transformers/issues/95', 'raw': 'https://github.com/huggingface/swift-transformers/issues/95'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm also looking at optimizing inference using an experimental kv cache in swift-transformers. It's a bit tricky because the layers have varying number of attention heads, but I'm curious to see how much this feature can accelerate performance in this model family :)"", 'raw': ""I'm also looking at optimizing inference using an experimental kv cache in swift-transformers. It's a bit tricky because the layers have varying number of attention heads, but I'm curious to see how much this feature can accelerate performance in this model family :)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Regarding the instruct fine-tuned models, I don't know the chat template that was used. The models use the Llama 2 tokenizer, but the Llama 2 chat template, or the default Alignment Handbook one that was used to train, are not recognized. Any ideas on this welcome!"", 'raw': ""Regarding the instruct fine-tuned models, I don't know the chat template that was used. 
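For a feel of what the conversion above involves (float32 precision, sequence length fixed to 128), here is a rough coremltools sketch; the authoritative version is the linked gist, and the `LogitsOnly` wrapper is my assumption to give tracing a single tensor output:

```python
# Rough Core ML conversion sketch; see the gist above for the real script.
import numpy as np
import torch
import coremltools as ct
from transformers import AutoModelForCausalLM

class LogitsOnly(torch.nn.Module):
    """Assumed wrapper so the traced graph returns a tensor, not a dict."""
    def __init__(self, model):
        super().__init__()
        self.model = model

    def forward(self, input_ids):
        return self.model(input_ids).logits

hf = AutoModelForCausalLM.from_pretrained(
    "apple/OpenELM-270M", trust_remote_code=True, torch_dtype=torch.float32
).eval()
seq_len = 128  # fixed, matching the parameters above
traced = torch.jit.trace(LogitsOnly(hf), torch.zeros((1, seq_len), dtype=torch.int64))

mlmodel = ct.convert(
    traced,
    inputs=[ct.TensorType(name="input_ids", shape=(1, seq_len), dtype=np.int32)],
    compute_precision=ct.precision.FLOAT32,
)
mlmodel.save("OpenELM-270M.mlpackage")
```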
The models use the Llama 2 tokenizer, but the Llama 2 chat template, or the default Alignment Handbook one that was used to train, are not recognized. Any ideas on this welcome!""}]","OpenELM in Core ML + +Apple recently released a set of efficient LLMs in sizes varying between 270M and 3B parameters. Their quality, according to benchmarks, is similar to OLMo models of comparable size, but they required half the pre-training tokens because they use layer-wise scaling, where the number of attention heads increases in deeper layers. + +I converted these models to Core ML, for use on Apple Silicon, using this script: https://gist.github.com/pcuenca/23cd08443460bc90854e2a6f0f575084. The converted models were uploaded to this community in the Hub for anyone that wants to integrate inside their apps: https://huggingface.co/collections/corenet-community/openelm-core-ml-6630c6b19268a5d878cfd194 + +The conversion was done with the following parameters: +- Precision: float32. +- Sequence length: fixed to 128. + +With swift-transformers (https://github.com/huggingface/swift-transformers), I'm getting about 56 tok/s with the 270M on my M1 Max, and 6.5 with the largest 3B model. These speeds could be improved by converting to `float16`. However, there's some precision loss somewhere and generation doesn't work in `float16` mode yet. I'm looking into this and will keep you posted! Or take a look at this issue if you'd like to help: https://github.com/huggingface/swift-transformers/issues/95 + +I'm also looking at optimizing inference using an experimental kv cache in swift-transformers. It's a bit tricky because the layers have varying number of attention heads, but I'm curious to see how much this feature can accelerate performance in this model family :) + +Regarding the instruct fine-tuned models, I don't know the chat template that was used. The models use the Llama 2 tokenizer, but the Llama 2 chat template, or the default Alignment Handbook one that was used to train, are not recognized. 
Any ideas on this welcome!",[],[],"[{'reaction': '🔥', 'users': ['ZachNagengast', 'brightbit', 'reach-vb', 'victor', 'radames', 'JimRWallace', 'NickyNicky', '1duo', 'HelloCephalopod', 'not-lain', 'julien-c', 'MathisDevFP', 'maxrubin629', 'michellenicole', 'Rainierraoul', 'anthonymikinka', 'toddmath', 'irotem98', 'velyan'], 'count': 19}, {'reaction': '❤️', 'users': ['reach-vb', 'JimRWallace', 'SuperL3D', 'HelloCephalopod', 'not-lain', 'azhka', 'MathisDevFP', 'toddmath', 'velyan'], 'count': 9}, {'reaction': '🚀', 'users': ['reach-vb', 'HelloCephalopod', 'not-lain', 'MathisDevFP', 'Norod78', '0xjorgev'], 'count': 6}, {'reaction': '🤯', 'users': ['reach-vb', 'HelloCephalopod', 'not-lain', 'MathisDevFP'], 'count': 4}]",2024-04-30 13:26:53,2025-06-22 12:50:57.898,"[{'_id': '63a13e4445edac9f750b7cb9', 'avatarUrl': '/avatars/2523b978663d915705cb9ac4cb878d6f.svg', 'fullname': 'Stephen', 'name': 'smpanaro', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10, 'isFollowing': False}, {'_id': '603d25b75f9d390ab190b777', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1617264212503-603d25b75f9d390ab190b777.jpeg', 'fullname': 'Pedro Cuenca', 'name': 'pcuenq', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1132, 'isFollowing': False}, {'_id': '639fe05cbeb95d698de716bc', 'avatarUrl': '/avatars/a517b6e94c8442cfa212ec0df42c9b1a.svg', 'fullname': 'Cebro', 'name': 'crbo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '683861adfdfd0cef3ef9fd7f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/683861adfdfd0cef3ef9fd7f/d6gmkoEX-BdOtny63-Yyu.jpeg', 'fullname': 'Irasubiza Viateur', 'name': 'VIATEUR-AI', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}]",/posts/pcuenq/949530140359936,7816,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/65eb213f00f1a613daafd462/uZuhXpD2B2aadwQxoL7DK.jpeg,57.0,Phenix Rhyder,phenixrhyder,406668118700661,"[{'type': 'text', 'value': 'Ai forest', 'raw': 'Ai forest'}]",Ai forest,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/65eb213f00f1a613daafd462/G6LgRktLcDlWgv5AlZ5cu.jpeg'}]",[],"[{'reaction': '🔥', 'users': ['samusenps', 'Masum889', 'sbrandeis'], 'count': 3}, {'reaction': '🤗', 'users': ['hjebuoebduede', 'dillfrescott'], 'count': 2}]",2024-04-30 10:34:51,2024-04-30 11:53:06.303,"[{'_id': '63d2ff88b734eaa4d4f86476', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1674772332792-noauth.jpeg', 'fullname': 'bhagaskara', 'name': 'bhagaskara', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/phenixrhyder/406668118700661,1662,,1 +/avatars/f383357c28a6221d62f49a07eecced03.svg,13.0,Patrick Haller,PatrickHaller,419031280358046,"[{'type': 'text', 'value': 'How Robust Is Your Model in Complex Code Generation Tasks? 🤔', 'raw': 'How Robust Is Your Model in Complex Code Generation Tasks? 🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We\'ve launched the PECC benchmark to challenge chat models in code generation, drawing from the Advent of Code for programming tasks and the Euler Project for math-heavy challenges. 
This new task tests models with problems presented in both detailed prose and concise ""leet code"" styles, evaluating their ability to understand and solve complex coding issues and math problems in chat-based interactions.', 'raw': 'We\'ve launched the PECC benchmark to challenge chat models in code generation, drawing from the Advent of Code for programming tasks and the Euler Project for math-heavy challenges. This new task tests models with problems presented in both detailed prose and concise ""leet code"" styles, evaluating their ability to understand and solve complex coding issues and math problems in chat-based interactions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It seems that the Claude 3 models outperform ChatGPT:', 'raw': 'It seems that the Claude 3 models outperform ChatGPT:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model / Avg. (pass@3)', 'raw': 'Model / Avg. (pass@3)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Claude 3 Haiku / 27.67', 'raw': 'Claude 3 Haiku / 27.67'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GPT-3.5-Turbo / 23.75', 'raw': 'GPT-3.5-Turbo / 23.75'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mixtral-8x22B-Instruct-v0.1 / 8.35', 'raw': 'Mixtral-8x22B-Instruct-v0.1 / 8.35'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Read our Preprint📃: ', 'raw': 'Read our Preprint📃: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.18766'}, 'url': 'https://huggingface.co/papers/2404.18766', 'raw': 'https://huggingface.co/papers/2404.18766', 'label': 'PECC: Problem Extraction and Coding Challenges (2404.18766)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Look at the dataset🔎: ', 'raw': 'Look at the dataset🔎: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'PatrickHaller/pecc'}, 'url': 'https://huggingface.co/datasets/PatrickHaller/pecc', 'raw': 'https://huggingface.co/datasets/PatrickHaller/pecc'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""We also got accepted at LREC-COLING '24 🎉"", 'raw': ""We also got accepted at LREC-COLING '24 🎉""}]","How Robust Is Your Model in Complex Code Generation Tasks? 🤔 + +We've launched the PECC benchmark to challenge chat models in code generation, drawing from the Advent of Code for programming tasks and the Euler Project for math-heavy challenges. This new task tests models with problems presented in both detailed prose and concise ""leet code"" styles, evaluating their ability to understand and solve complex coding issues and math problems in chat-based interactions. + +It seems that the Claude 3 models outperform ChatGPT: +Model / Avg.
(pass@3) +Claude 3 Haiku / 27.67 +GPT-3.5-Turbo / 23.75 +Mixtral-8x22B-Instruct-v0.1 / 8.35 + +Read our Preprint📃: https://huggingface.co/papers/2404.18766 +Look at the dataset🔎: https://huggingface.co/datasets/PatrickHaller/pecc + +We also got accepted at LREC-COLING '24 🎉",[],[],"[{'reaction': '🔥', 'users': ['victor', 'barthfab', 'alanakbik'], 'count': 3}]",2024-04-30 08:31:04,2024-04-30 08:31:04.733,[],/posts/PatrickHaller/419031280358046,2005,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg,1222.0,Nishith Jain,KingNish,484550038911380,"[{'type': 'text', 'value': ""Introducing JARVIS Tony's voice assistant for You. "", 'raw': ""Introducing JARVIS Tony's voice assistant for You. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'JARVIS responds to all your questions in audio format.', 'raw': 'JARVIS responds to all your questions in audio format.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Must TRY -> ', 'raw': 'Must TRY -> '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'KingNish/JARVIS'}, 'url': 'https://huggingface.co/spaces/KingNish/JARVIS', 'raw': 'https://huggingface.co/spaces/KingNish/JARVIS'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Jarvis is currently equipped to accept text input and provide audio output.', 'raw': 'Jarvis is currently equipped to accept text input and provide audio output.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the future, it may also support audio input.', 'raw': 'In the future, it may also support audio input.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'DEMO Video: ', 'raw': 'DEMO Video: '}, {'type': 'new_line', 'raw': '\n'}]","Introducing JARVIS Tony's voice assistant for You. + +JARVIS responds to all your questions in audio format. +Must TRY -> https://huggingface.co/spaces/KingNish/JARVIS + +Jarvis is currently equipped to accept text input and provide audio output. +In the future, it may also support audio input. 
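One note on the PECC scores above: "pass@3"-style metrics are commonly estimated with the unbiased pass@k estimator from the Codex paper (whether PECC uses this exact estimator isn't stated here):

```python
# Unbiased pass@k (Chen et al., 2021): probability that at least one of k
# sampled solutions passes, given that c of n generated samples passed.
from math import comb

def pass_at_k(n: int, c: int, k: int) -> float:
    if n - c < k:
        return 1.0
    return 1.0 - comb(n - c, k) / comb(n, k)

print(pass_at_k(n=10, c=3, k=3))  # e.g. 3 of 10 samples passed
```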
+ +DEMO Video: +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6612aedf09f16e7347dfa7e1/Ti6kgCUUizXYvKU7pPAJD.mp4'}]",[],"[{'reaction': '🔥', 'users': ['victor', 'sebastianking', 'SvCy', 'DamarJati', 'Ezi', 'Marc-Gloaguen', 'julien-c'], 'count': 7}, {'reaction': '❤️', 'users': ['KingNish'], 'count': 1}]",2024-04-30 08:23:33,2024-05-01 13:33:45.682,"[{'_id': '6456f3ca1ca9debab0554f8b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6456f3ca1ca9debab0554f8b/pQuSK-pS3NnJgXewbODvh.png', 'fullname': 'Damar Jati 🍫', 'name': 'DamarJati', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 173, 'isFollowing': False}, {'_id': '6612aedf09f16e7347dfa7e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg', 'fullname': 'Nishith Jain', 'name': 'KingNish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1222, 'isFollowing': False}, {'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}]",/posts/KingNish/484550038911380,2665,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1648966381588-6064e095abd8d3692e3e2ed6.jpeg,2555.0,Radamés Ajna,radames,775875912158761,"[{'type': 'text', 'value': ""I've built a custom component that integrates Rerun web viewer with Gradio, making it easier to share your demos as Gradio apps."", 'raw': ""I've built a custom component that integrates Rerun web viewer with Gradio, making it easier to share your demos as Gradio apps.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Basic snippet', 'raw': 'Basic snippet'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': '# pip install gradio_rerun gradio\nimport gradio as gr\nfrom gradio_rerun import Rerun\n\ngr.Interface(\n inputs=gr.File(file_count=""multiple"", type=""filepath""),\n outputs=Rerun(height=900),\n fn=lambda file_path: file_path,\n).launch()', 'raw': '```\n# pip install gradio_rerun gradio\nimport gradio as gr\nfrom gradio_rerun import Rerun\n\ngr.Interface(\n inputs=gr.File(file_count=""multiple"", type=""filepath""),\n outputs=Rerun(height=900),\n fn=lambda file_path: file_path,\n).launch()\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More details here ', 'raw': 'More details here '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/radames/gradio_rerun', 'raw': 'https://huggingface.co/spaces/radames/gradio_rerun'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Source ', 'raw': 'Source '}, {'type': 'link', 'href': 'https://github.com/radames/gradio-rerun-viewer', 'raw': 'https://github.com/radames/gradio-rerun-viewer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow Rerun here ', 'raw': 'Follow Rerun here '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'rerun'}, 'url': 'https://huggingface.co/rerun', 'raw': 'https://huggingface.co/rerun', 'image': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/639c4b568a34ed9a4048efe5/GKiUy3SrG30QkAlKHf4io.png'}]","I've built a custom component that integrates Rerun web viewer with Gradio, making it easier to share your demos as Gradio apps. + +Basic snippet +``` +# pip install gradio_rerun gradio +import gradio as gr +from gradio_rerun import Rerun + +gr.Interface( + inputs=gr.File(file_count=""multiple"", type=""filepath""), + outputs=Rerun(height=900), + fn=lambda file_path: file_path, +).launch() +``` +More details here https://huggingface.co/spaces/radames/gradio_rerun +Source https://github.com/radames/gradio-rerun-viewer + +Follow Rerun here https://huggingface.co/rerun","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6064e095abd8d3692e3e2ed6/m1-Lnytc01ZbRzxxCcxvN.mp4'}]",[],"[{'reaction': '❤️', 'users': ['louisbrulenaudet', 'AlekseiPravdin', 'andreasnaoum', 'Tyfo87'], 'count': 4}]",2024-04-30 04:23:07,2024-04-30 04:23:37.694,[],/posts/radames/775875912158761,2533,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,649395735581384,"[{'type': 'text', 'value': '💰❌ 𝐑𝐞𝐬𝐞𝐚𝐫𝐜𝐡 𝐟𝐨𝐫 𝐭𝐡𝐞 𝐯𝐞𝐫𝐲 𝐆𝐏𝐔 𝐏𝐨𝐨𝐫 - 𝐒𝐜𝐚𝐥𝐢𝐧𝐠 𝐥𝐚𝐰𝐬 𝐫𝐞𝐩𝐥𝐢𝐜𝐚𝐭𝐢𝐨𝐧', 'raw': '💰❌ 𝐑𝐞𝐬𝐞𝐚𝐫𝐜𝐡 𝐟𝐨𝐫 𝐭𝐡𝐞 𝐯𝐞𝐫𝐲 𝐆𝐏𝐔 𝐏𝐨𝐨𝐫 - 𝐒𝐜𝐚𝐥𝐢𝐧𝐠 𝐥𝐚𝐰𝐬 𝐫𝐞𝐩𝐥𝐢𝐜𝐚𝐭𝐢𝐨𝐧'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎆 Good news: 𝘆𝗼𝘂 𝗰𝗮𝗻 𝗱𝗼 𝗰𝘂𝘁𝘁𝗶𝗻𝗴-𝗲𝗱𝗴𝗲 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝘄𝗶𝘁𝗵 𝗮 𝗰𝗮𝗹𝗰𝘂𝗹𝗮𝘁𝗼𝗿 𝗮𝗻𝗱 𝗠𝗶𝗰𝗿𝗼𝘀𝗼𝗳𝘁 𝗣𝗮𝗶𝗻𝘁 𝟮𝟬𝟬𝟲!', 'raw': '🎆 Good news: 𝘆𝗼𝘂 𝗰𝗮𝗻 𝗱𝗼 𝗰𝘂𝘁𝘁𝗶𝗻𝗴-𝗲𝗱𝗴𝗲 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝘄𝗶𝘁𝗵 𝗮 𝗰𝗮𝗹𝗰𝘂𝗹𝗮𝘁𝗼𝗿 𝗮𝗻𝗱 𝗠𝗶𝗰𝗿𝗼𝘀𝗼𝗳𝘁 𝗣𝗮𝗶𝗻𝘁 𝟮𝟬𝟬𝟲!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Chinchilla experiments (by Google DeepMind) ran hundreds of pre-trainings with models >1B parameters (I do not want to imagine how much that cost) to 𝗳𝗶𝗻𝗱 𝘁𝗵𝗲 𝗼𝗽𝘁𝗶𝗺𝗮𝗹 𝗿𝗮𝘁𝗶𝗼 𝗼𝗳 𝗺𝗼𝗱𝗲𝗹 𝘀𝗶𝘇𝗲 𝘃𝘀 𝘁𝗿𝗮𝗶𝗻𝗶𝗻𝗴 𝘁𝗼𝗸𝗲𝗻𝘀. Why is this question so important?', 'raw': 'The Chinchilla experiments (by Google DeepMind) ran hundreds of pre-trainings with models >1B parameters (I do not want to imagine how much that cost) to 𝗳𝗶𝗻𝗱 𝘁𝗵𝗲 𝗼𝗽𝘁𝗶𝗺𝗮𝗹 𝗿𝗮𝘁𝗶𝗼 𝗼𝗳 𝗺𝗼𝗱𝗲𝗹 𝘀𝗶𝘇𝗲 𝘃𝘀 𝘁𝗿𝗮𝗶𝗻𝗶𝗻𝗴 𝘁𝗼𝗸𝗲𝗻𝘀. Why is this question so important?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Well, you only ever have access to a fixed compute, counted in FLOPs (floating point operations). So if your model is bigger, you will have less compute to train on many tokens, and if you want to train on more tokens, your model will be smaller. When model trainings cost million, you absolutely need to get this right.', 'raw': 'Well, you only ever have access to a fixed compute, counted in FLOPs (floating point operations). So if your model is bigger, you will have less compute to train on many tokens, and if you want to train on more tokens, your model will be smaller. 
When model trainings cost million, you absolutely need to get this right.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The new paper ""Chinchilla Scaling: A replication attempt"" by Epoch AI sets on on the ambitious goal of reproducing this.', 'raw': 'The new paper ""Chinchilla Scaling: A replication attempt"" by Epoch AI sets on on the ambitious goal of reproducing this.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data."", 'raw': ""But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals.', 'raw': '💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled.', 'raw': '➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have!', 'raw': '✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have!'}]","💰❌ 𝐑𝐞𝐬𝐞𝐚𝐫𝐜𝐡 𝐟𝐨𝐫 𝐭𝐡𝐞 𝐯𝐞𝐫𝐲 𝐆𝐏𝐔 𝐏𝐨𝐨𝐫 - 𝐒𝐜𝐚𝐥𝐢𝐧𝐠 𝐥𝐚𝐰𝐬 𝐫𝐞𝐩𝐥𝐢𝐜𝐚𝐭𝐢𝐨𝐧 + +🎆 Good news: 𝘆𝗼𝘂 𝗰𝗮𝗻 𝗱𝗼 𝗰𝘂𝘁𝘁𝗶𝗻𝗴-𝗲𝗱𝗴𝗲 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝘄𝗶𝘁𝗵 𝗮 𝗰𝗮𝗹𝗰𝘂𝗹𝗮𝘁𝗼𝗿 𝗮𝗻𝗱 𝗠𝗶𝗰𝗿𝗼𝘀𝗼𝗳𝘁 𝗣𝗮𝗶𝗻𝘁 𝟮𝟬𝟬𝟲! + +The Chinchilla experiments (by Google DeepMind) ran hundreds of pre-trainings with models >1B parameters (I do not want to imagine how much that cost) to 𝗳𝗶𝗻𝗱 𝘁𝗵𝗲 𝗼𝗽𝘁𝗶𝗺𝗮𝗹 𝗿𝗮𝘁𝗶𝗼 𝗼𝗳 𝗺𝗼𝗱𝗲𝗹 𝘀𝗶𝘇𝗲 𝘃𝘀 𝘁𝗿𝗮𝗶𝗻𝗶𝗻𝗴 𝘁𝗼𝗸𝗲𝗻𝘀. Why is this question so important? +Well, you only ever have access to a fixed compute, counted in FLOPs (floating point operations). So if your model is bigger, you will have less compute to train on many tokens, and if you want to train on more tokens, your model will be smaller. 
When model trainings cost millions, you absolutely need to get this right.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The new paper ""Chinchilla Scaling: A replication attempt"" by Epoch AI sets out on the ambitious goal of reproducing this.', 'raw': 'The new paper ""Chinchilla Scaling: A replication attempt"" by Epoch AI sets out on the ambitious goal of reproducing this.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data."", 'raw': ""But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals.', 'raw': '💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled.', 'raw': '➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have!', 'raw': '✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have!'}]","💰❌ 𝐑𝐞𝐬𝐞𝐚𝐫𝐜𝐡 𝐟𝐨𝐫 𝐭𝐡𝐞 𝐯𝐞𝐫𝐲 𝐆𝐏𝐔 𝐏𝐨𝐨𝐫 - 𝐒𝐜𝐚𝐥𝐢𝐧𝐠 𝐥𝐚𝐰𝐬 𝐫𝐞𝐩𝐥𝐢𝐜𝐚𝐭𝐢𝐨𝐧 + +🎆 Good news: 𝘆𝗼𝘂 𝗰𝗮𝗻 𝗱𝗼 𝗰𝘂𝘁𝘁𝗶𝗻𝗴-𝗲𝗱𝗴𝗲 𝗿𝗲𝘀𝗲𝗮𝗿𝗰𝗵 𝘄𝗶𝘁𝗵 𝗮 𝗰𝗮𝗹𝗰𝘂𝗹𝗮𝘁𝗼𝗿 𝗮𝗻𝗱 𝗠𝗶𝗰𝗿𝗼𝘀𝗼𝗳𝘁 𝗣𝗮𝗶𝗻𝘁 𝟮𝟬𝟬𝟲! + +The Chinchilla experiments (by Google DeepMind) ran hundreds of pre-trainings with models >1B parameters (I do not want to imagine how much that cost) to 𝗳𝗶𝗻𝗱 𝘁𝗵𝗲 𝗼𝗽𝘁𝗶𝗺𝗮𝗹 𝗿𝗮𝘁𝗶𝗼 𝗼𝗳 𝗺𝗼𝗱𝗲𝗹 𝘀𝗶𝘇𝗲 𝘃𝘀 𝘁𝗿𝗮𝗶𝗻𝗶𝗻𝗴 𝘁𝗼𝗸𝗲𝗻𝘀. Why is this question so important? +Well, you only ever have access to a fixed compute, counted in FLOPs (floating point operations). So if your model is bigger, you will have less compute to train on many tokens, and if you want to train on more tokens, your model will be smaller. When model trainings cost millions, you absolutely need to get this right. + +The new paper ""Chinchilla Scaling: A replication attempt"" by Epoch AI sets out on the ambitious goal of reproducing this. + +But since the authors do not have infinite money, they decided to directly run their computations from DeepMind's own experiments! They took the figure from the last experiment (cf slide below), measured point positions, picked color codes, and ended up reconstructing the underlying data. + +💥 They then just fit the scaling laws proposed by the Chinchilla Authors, but arrived at wildly different results! They find that as a rough rule of thumb, you should use 20 training tokens for each parameter in your model, instead of the 70 obtained in the original paper. They also point out inconsistencies in the paper, and unrealistically narrow confidence intervals. + +➡️ This only contradicts the results from the last (out of 3) experiments in the Chinchilla paper. And the model trained at the end of the Chinchilla paper still seems properly scaled. + +✅ But it does show that a tiny bit more theoretical work can go a long way, especially given the huge financial costs that such an error can have!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/0qpW4Z30N3rzAuCslgmk9.png'}]",[],"[{'reaction': '👀', 'users': ['chansung', 'hllj', 'maywell', 'radames', 'victor', 'cstr'], 'count': 6}, {'reaction': '🤯', 'users': ['maywell', 'radames', 'louisbrulenaudet', 'KingNish', 'NotSiDDH'], 'count': 5}]",2024-04-29 17:16:25,2024-04-29 17:16:25.389,[],/posts/m-ric/649395735581384,2812,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1641203017724-noauth.png,138.0,Joao Gante,joaogante,103975211911493,"[{'type': 'text', 'value': 'Adding a long prompt can help you fight LLM hallucinations. However, if you know exactly how you want your LLM output constrained, there are much better strategies! 💪', 'raw': 'Adding a long prompt can help you fight LLM hallucinations. However, if you know exactly how you want your LLM output constrained, there are much better strategies! 💪'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Did you know you can force your LLM to ALWAYS generate a valid JSON file? Or to follow a well-defined answer template? You can do that and more with the 🤗 transformers-compatible ', 'raw': 'Did you know you can force your LLM to ALWAYS generate a valid JSON file? Or to follow a well-defined answer template? You can do that and more with the 🤗 transformers-compatible '}, {'type': 'inline_code', 'code': 'outlines', 'raw': '`outlines`'}, {'type': 'text', 'value': ' library.', 'raw': ' library.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It doesn't only allow you to master your LLM -- your text generation application will also become faster! 
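To make the replication's rule of thumb above concrete, a quick back-of-the-envelope using the standard C ≈ 6·N·D FLOPs approximation (the budgets below are illustrative):

```python
# With D = r*N tokens (r = 20 per the replication, vs 70 in the fit it
# disputes), a budget C = 6*N*D determines N = sqrt(C / (6*r)).
import math

def compute_optimal(c_flops: float, tokens_per_param: float = 20.0):
    n = math.sqrt(c_flops / (6.0 * tokens_per_param))
    return n, tokens_per_param * n

for budget in (1e21, 1e23, 1e25):
    n, d = compute_optimal(budget)
    print(f"C={budget:.0e} FLOPs -> N≈{n/1e9:.2f}B params, D≈{d/1e9:.0f}B tokens")
```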
🔥 The more constrained your text generation is, the bigger speedups you'll see!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow ', 'raw': 'Follow '}, {'type': 'mention', 'user': 'remi', 'raw': '@remi'}, {'type': 'text', 'value': ' and other ', 'raw': ' and other '}, {'type': 'inline_code', 'code': 'outlines', 'raw': '`outlines`'}, {'type': 'text', 'value': ' folks to stay on top of the constrained generation game 🧠', 'raw': ' folks to stay on top of the constrained generation game 🧠'}]","Adding a long prompt can help you fight LLM hallucinations. However, if you know exactly how you want your LLM output constrained, there are much better strategies! 💪 + +Did you know you can force your LLM to ALWAYS generate a valid JSON file? Or to follow a well-defined answer template? You can do that and more with the 🤗 transformers-compatible `outlines` library. + +It doesn't only allow you to master your LLM -- your text generation application will also become faster! 🔥 The more constrained your text generation is, the bigger speedups you'll see! + +Follow @remi and other `outlines` folks to stay on top of the constrained generation game 🧠","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61d2c54d76c37de24cfed058/Oj2CAYMQzelOJBmcEZ4BC.png'}]","[{'_id': '5de8d7255c51de1bfc829f99', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5de8d7255c51de1bfc829f99/98fxu2lJMyEsh2j2PtsAs.jpeg', 'fullname': 'Remi Louf', 'name': 'remi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 30}]","[{'reaction': '🔥', 'users': ['victor', 'Andyrasika', 'a9i', 'bunnycore', 'fcakyon'], 'count': 5}]",2024-04-29 16:50:52,2024-04-29 16:50:52.057,[],/posts/joaogante/103975211911493,2710,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png,122.0,Asankhaya Sharma,codelion,139450027221410,"[{'type': 'text', 'value': 'Happy to announce the open source framework to turbo charge devops called patchwork - ', 'raw': 'Happy to announce the open source framework to turbo charge devops called patchwork - '}, {'type': 'link', 'href': 'https://github.com/patched-codes/patchwork', 'raw': 'https://github.com/patched-codes/patchwork'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can use it to build patchflows - workflows that use LLMs for software development tasks like bug fixing, pull request review, library migration and documentation. ', 'raw': 'You can use it to build patchflows - workflows that use LLMs for software development tasks like bug fixing, pull request review, library migration and documentation. 
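As a quick illustration of the constrained-generation idea above, a minimal JSON-constrained sketch (function names follow the 2024-era 0.0.x outlines API and may differ in newer releases; the model choice is arbitrary):

```python
# Sketch: force generation to match a Pydantic schema with outlines.
from pydantic import BaseModel
import outlines

class Character(BaseModel):
    name: str
    age: int

model = outlines.models.transformers("mistralai/Mistral-7B-Instruct-v0.2")
generator = outlines.generate.json(model, Character)
character = generator("Invent a fantasy character.")  # always schema-valid
print(character)
```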
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Supports any LLM of your choice including our own MoE model - ', 'raw': 'Supports any LLM of your choice including our own MoE model - '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'patched-codes/patched-mix-4x7B'}, 'url': 'https://huggingface.co/patched-codes/patched-mix-4x7B', 'raw': 'https://huggingface.co/patched-codes/patched-mix-4x7B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Give it a try!', 'raw': 'Give it a try!'}]","Happy to announce the open source framework to turbo charge devops called patchwork - https://github.com/patched-codes/patchwork + +You can use it to build patchflows - workflows that use LLMs for software development tasks like bug fixing, pull request review, library migration and documentation. + +Supports any LLM of your choice including our own MoE model - https://huggingface.co/patched-codes/patched-mix-4x7B + +Give it a try!",[],[],"[{'reaction': '🔥', 'users': ['codelion', 'milkowski', 'victor', 'AlekseiPravdin'], 'count': 4}, {'reaction': '🚀', 'users': ['codelion', 't1u1', 'spooner2'], 'count': 3}]",2024-04-24 14:28:39,2024-05-14 04:32:56.564,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '62f32eab52ad88c930bb3f3b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png', 'fullname': 'Asankhaya Sharma', 'name': 'codelion', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 122, 'isFollowing': False}]",/posts/codelion/139450027221410,1765,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,504351998498924,"[{'type': 'text', 'value': 'Got access to Devin today and boy it’s been rocking it - 10x engineer on pure software dev tasks, albeit falls at the mercy of ML/AI tasks. Still a promising work of daring-engineering feat, wishing all the best to the team @cognition_labs', 'raw': 'Got access to Devin today and boy it’s been rocking it - 10x engineer on pure software dev tasks, albeit falls at the mercy of ML/AI tasks. Still a promising work of daring-engineering feat, wishing all the best to the team @cognition_labs'}]","Got access to Devin today and boy it’s been rocking it - 10x engineer on pure software dev tasks, albeit falls at the mercy of ML/AI tasks. 
Still a promising work of daring-engineering feat, wishing all the best to the team @cognition_labs","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/GnSSwonzZ5Y2Qeg7Cj8jE.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/SeevBLK5FoL4MkrLxXkUz.png'}]",[],"[{'reaction': '👀', 'users': ['jacob-valdez', 'Abubakr18', 'tomaarsen', 'victor', 'mariachus', 'Joseph717171'], 'count': 6}]",2024-04-24 13:01:26,2024-04-24 19:11:21.143,"[{'_id': '62f32eab52ad88c930bb3f3b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1677134945205-62f32eab52ad88c930bb3f3b.png', 'fullname': 'Asankhaya Sharma', 'name': 'codelion', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 122, 'isFollowing': False}, {'_id': '6438a9027de34e8ea7e4b257', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg', 'fullname': 'Jaward Sesay', 'name': 'Jaward', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 331, 'isFollowing': False}, {'_id': '64175bc2b03817ada642291f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64175bc2b03817ada642291f/V3mhc8Y0saSgXbp--2HcE.png', 'fullname': 'Kh', 'name': 'raidhon', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}]",/posts/Jaward/504351998498924,1812,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,555649356176130,"[{'type': 'text', 'value': 'OpenELM', 'raw': 'OpenELM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'An Efficient Language Model Family with Open-source Training and Inference Framework', 'raw': 'An Efficient Language Model Family with Open-source Training and Inference Framework'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.14619'}, 'url': 'https://huggingface.co/papers/2404.14619', 'raw': 'https://huggingface.co/papers/2404.14619', 'label': 'OpenELM: An Efficient Language Model Family with Open-source Training\n and Inference Framework (2404.14619)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The reproducibility and transparency of large language models are crucial for advancing open research, ensuring the trustworthiness of results, and enabling investigations into data and model biases, as well as potential risks. To this end, we release OpenELM, a state-of-the-art open language model. OpenELM uses a layer-wise scaling strategy to efficiently allocate parameters within each layer of the transformer model, leading to enhanced accuracy. For example, with a parameter budget of approximately one billion parameters, OpenELM exhibits a 2.36% improvement in accuracy compared to OLMo while requiring 2times fewer pre-training tokens. Diverging from prior practices that only provide model weights and inference code, and pre-train on private datasets, our release includes the complete framework for training and evaluation of the language model on publicly available datasets, including training logs, multiple checkpoints, and pre-training configurations. 
We also release code to convert models to MLX library for inference and fine-tuning on Apple devices. This comprehensive release aims to empower and strengthen the open research community, paving the way for future open research endeavors. ', 'raw': 'The reproducibility and transparency of large language models are crucial for advancing open research, ensuring the trustworthiness of results, and enabling investigations into data and model biases, as well as potential risks. To this end, we release OpenELM, a state-of-the-art open language model. OpenELM uses a layer-wise scaling strategy to efficiently allocate parameters within each layer of the transformer model, leading to enhanced accuracy. For example, with a parameter budget of approximately one billion parameters, OpenELM exhibits a 2.36% improvement in accuracy compared to OLMo while requiring 2times fewer pre-training tokens. Diverging from prior practices that only provide model weights and inference code, and pre-train on private datasets, our release includes the complete framework for training and evaluation of the language model on publicly available datasets, including training logs, multiple checkpoints, and pre-training configurations. We also release code to convert models to MLX library for inference and fine-tuning on Apple devices. This comprehensive release aims to empower and strengthen the open research community, paving the way for future open research endeavors. '}, {'type': 'new_line', 'raw': '\n'}]","OpenELM + +An Efficient Language Model Family with Open-source Training and Inference Framework + +https://huggingface.co/papers/2404.14619 + +The reproducibility and transparency of large language models are crucial for advancing open research, ensuring the trustworthiness of results, and enabling investigations into data and model biases, as well as potential risks. To this end, we release OpenELM, a state-of-the-art open language model. OpenELM uses a layer-wise scaling strategy to efficiently allocate parameters within each layer of the transformer model, leading to enhanced accuracy. For example, with a parameter budget of approximately one billion parameters, OpenELM exhibits a 2.36% improvement in accuracy compared to OLMo while requiring 2times fewer pre-training tokens. Diverging from prior practices that only provide model weights and inference code, and pre-train on private datasets, our release includes the complete framework for training and evaluation of the language model on publicly available datasets, including training logs, multiple checkpoints, and pre-training configurations. We also release code to convert models to MLX library for inference and fine-tuning on Apple devices. This comprehensive release aims to empower and strengthen the open research community, paving the way for future open research endeavors. +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/BUZBz9I3sYwkreUFwZRKH.png'}]",[],"[{'reaction': '👍', 'users': ['Vishwas1', 'mariachus', 'MexIvanov'], 'count': 3}]",2024-04-24 12:26:41,2024-04-24 12:26:41.337,[],/posts/akhaliq/555649356176130,3138,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,217829762957938,"[{'type': 'text', 'value': 'I have built a Space to compare different vision language model outputs, which model should I add next? 
👀', 'raw': 'I have built a Space to compare different vision language model outputs, which model should I add next? 👀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try them yourself here ', 'raw': 'Try them yourself here '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'merve/compare_VLMs'}, 'url': 'https://huggingface.co/spaces/merve/compare_VLMs', 'raw': 'https://huggingface.co/spaces/merve/compare_VLMs'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","I have built a Space to compare different vision language model outputs, which model should I add next? 👀 +Try them yourself here https://huggingface.co/spaces/merve/compare_VLMs ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/mqw2q8u2wjg993dNLjRGU.png'}]",[],"[{'reaction': '🔥', 'users': ['adorkin', 'fdaudens', 'louisbrulenaudet'], 'count': 3}]",2024-04-24 11:18:31,2024-04-24 11:43:59.076,"[{'_id': '6375f45a319390860284394b', 'avatarUrl': '/avatars/ea2db4cb97adfba9e3075206f15ebe13.svg', 'fullname': 'Alina', 'name': 'iblub', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/merve/217829762957938,2324,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6455cc8d679315e4ef16fbec/M6Cfifn05BUzkCFd2QDIT.png,159.0,Tim Dolan,macadeliccc,273809723337347,"[{'type': 'text', 'value': 'Fine tune Phi-3 using samatha themed dataset and Huggingface SFT trainer!', 'raw': 'Fine tune Phi-3 using samatha themed dataset and Huggingface SFT trainer!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this colab, we simply apply a supervised finetune to phi-3 using the sharegpt format.', 'raw': 'In this colab, we simply apply a supervised finetune to phi-3 using the sharegpt format.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'def formatting_prompts_func(examples):\n convos = examples[""conversations""]\n texts = []\n mapper = {""system"": ""system\\n"", ""human"": ""\\nuser\\n"", ""gpt"": ""\\nassistant\\n""}\n end_mapper = {""system"": """", ""human"": """", ""gpt"": """"}\n for convo in convos:\n text = """".join(f""{mapper[(turn := x[\'from\'])]} {x[\'value\']}\\n{end_mapper[turn]}"" for x in convo)\n texts.append(f""{text}{EOS_TOKEN}"") \n return {""text"": texts}\n\ndataset = dataset.map(formatting_prompts_func, batched=True)\nprint(dataset[\'text\'][8])', 'raw': '```\ndef formatting_prompts_func(examples):\n convos = examples[""conversations""]\n texts = []\n mapper = {""system"": ""system\\n"", ""human"": ""\\nuser\\n"", ""gpt"": ""\\nassistant\\n""}\n end_mapper = {""system"": """", ""human"": """", ""gpt"": """"}\n for convo in convos:\n text = """".join(f""{mapper[(turn := x[\'from\'])]} {x[\'value\']}\\n{end_mapper[turn]}"" for x in convo)\n texts.append(f""{text}{EOS_TOKEN}"") \n return {""text"": texts}\n\ndataset = dataset.map(formatting_prompts_func, batched=True)\nprint(dataset[\'text\'][8])\n\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Opus Samantha consists of 1848 samples with the samantha personality. The dataset covers a wide variety of topics such as logical reasoning, mathematics, legal, and rp. ', 'raw': 'Opus Samantha consists of 1848 samples with the samantha personality. The dataset covers a wide variety of topics such as logical reasoning, mathematics, legal, and rp. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This notebook serves as a viable option to finetune Phi-3 until Unsloth supports phi-3, which should be very soon. When that happens check out AutoSloth for both SFT, DPO, and langfuse format RAG fine tuning on free tier colab hardware.', 'raw': 'This notebook serves as a viable option to finetune Phi-3 until Unsloth supports phi-3, which should be very soon. When that happens check out AutoSloth for both SFT, DPO, and langfuse format RAG fine tuning on free tier colab hardware.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Resources:', 'raw': 'Resources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'macadeliccc/opus_samantha'}, 'url': 'https://huggingface.co/datasets/macadeliccc/opus_samantha', 'raw': 'https://huggingface.co/datasets/macadeliccc/opus_samantha'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Colab: ', 'raw': 'Colab: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1e8LILflDQ2Me52hwS7uIfuJ9DxE2oQzM?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1e8LILflDQ2Me52hwS7uIfuJ9DxE2oQzM?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AutoSloth: ', 'raw': 'AutoSloth: '}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1Zo0sVEb2lqdsUm9dy2PTzGySxdF9CNkc#scrollTo=bpimlPXVz-CZ', 'raw': 'https://colab.research.google.com/drive/1Zo0sVEb2lqdsUm9dy2PTzGySxdF9CNkc#scrollTo=bpimlPXVz-CZ'}]","Fine tune Phi-3 using samatha themed dataset and Huggingface SFT trainer! + +In this colab, we simply apply a supervised finetune to phi-3 using the sharegpt format. + +``` +def formatting_prompts_func(examples): + convos = examples[""conversations""] + texts = [] + mapper = {""system"": ""system\n"", ""human"": ""\nuser\n"", ""gpt"": ""\nassistant\n""} + end_mapper = {""system"": """", ""human"": """", ""gpt"": """"} + for convo in convos: + text = """".join(f""{mapper[(turn := x['from'])]} {x['value']}\n{end_mapper[turn]}"" for x in convo) + texts.append(f""{text}{EOS_TOKEN}"") + return {""text"": texts} + +dataset = dataset.map(formatting_prompts_func, batched=True) +print(dataset['text'][8]) + +``` +Opus Samantha consists of 1848 samples with the samantha personality. The dataset covers a wide variety of topics such as logical reasoning, mathematics, legal, and rp. + +This notebook serves as a viable option to finetune Phi-3 until Unsloth supports phi-3, which should be very soon. When that happens check out AutoSloth for both SFT, DPO, and langfuse format RAG fine tuning on free tier colab hardware. 
+ +Resources: +Dataset: https://huggingface.co/datasets/macadeliccc/opus_samantha +Colab: https://colab.research.google.com/drive/1e8LILflDQ2Me52hwS7uIfuJ9DxE2oQzM?usp=sharing +AutoSloth: https://colab.research.google.com/drive/1Zo0sVEb2lqdsUm9dy2PTzGySxdF9CNkc#scrollTo=bpimlPXVz-CZ",[],[],"[{'reaction': '🔥', 'users': ['victor', 'KingNish', 'Qwoook', 'midesk', 'pabloce', 'lunarflu', 'andrewatef', 'EddyGiusepe'], 'count': 8}, {'reaction': '👍', 'users': ['santyzenith', 'clavel', 'lunarflu', 'Norod78', 'trtm'], 'count': 5}, {'reaction': '🚀', 'users': ['trtm'], 'count': 1}]",2024-04-24 05:20:15,2024-04-24 05:20:15.665,[],/posts/macadeliccc/273809723337347,4592,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,283128537556650,"[{'type': 'text', 'value': 'Complete Guide to SUPIR Enhancing and Upscaling Images Like in Sci-Fi Movies on Your PC : ', 'raw': 'Complete Guide to SUPIR Enhancing and Upscaling Images Like in Sci-Fi Movies on Your PC : '}, {'type': 'link', 'href': 'https://youtu.be/OYxVEvDf284', 'raw': 'https://youtu.be/OYxVEvDf284'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this video, I explain how to 1 click install and use the most advanced image upscaler / enhancer in the world that is both commercially and open source available. The upscaler that I am going to introduce you is open source #SUPIR and the model is free to use. SUPIR upscaler is many times better than both paid Topaz AI and Magnific AI and you can use this upscaler on your computer for free forever. The difference of SUPIR vs #Topaz and #Magnific is like ages. So in this tutorial you are going to learn everything about how to install, update and use SUPIR upscaler on your personal computer. The video shows Windows but it works perfectly fine on Linux as well.', 'raw': 'In this video, I explain how to 1 click install and use the most advanced image upscaler / enhancer in the world that is both commercially and open source available. The upscaler that I am going to introduce you is open source #SUPIR and the model is free to use. SUPIR upscaler is many times better than both paid Topaz AI and Magnific AI and you can use this upscaler on your computer for free forever. The difference of SUPIR vs #Topaz and #Magnific is like ages. So in this tutorial you are going to learn everything about how to install, update and use SUPIR upscaler on your personal computer. 
The video shows Windows but it works perfectly fine on Linux as well.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Scripts Download Link ⤵️', 'raw': 'Scripts Download Link ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.patreon.com/posts/99176057', 'raw': 'https://www.patreon.com/posts/99176057'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Samplers and Text CFG (Text Guidance Scale) Comparison Link ⤵️', 'raw': 'Samplers and Text CFG (Text Guidance Scale) Comparison Link ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://imgsli.com/MjU2ODQz/2/1', 'raw': 'https://imgsli.com/MjU2ODQz/2/1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How to install accurate Python, Git and FFmpeg on Windows Tutorial ⤵️', 'raw': 'How to install accurate Python, Git and FFmpeg on Windows Tutorial ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/-NjNy7afOQ0', 'raw': 'https://youtu.be/-NjNy7afOQ0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Full DreamBooth / Fine-tuning Tutorial ⤵️', 'raw': 'Full DreamBooth / Fine-tuning Tutorial ⤵️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://youtu.be/0t5l6CP9eBg', 'raw': 'https://youtu.be/0t5l6CP9eBg'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Scaling Up to Excellence: Practicing Model Scaling for Photo-Realistic Image Restoration In the Wild : ', 'raw': 'Scaling Up to Excellence: Practicing Model Scaling for Photo-Realistic Image Restoration In the Wild : '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2401.13627', 'raw': 'https://arxiv.org/abs/2401.13627'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Authors introduce SUPIR (Scaling-UP Image Restoration), a groundbreaking image restoration method that harnesses generative prior and the power of model scaling up. Leveraging multi-modal techniques and advanced generative prior, SUPIR marks a significant advance in intelligent and realistic image restoration. As a pivotal catalyst within SUPIR, model scaling dramatically enhances its capabilities and demonstrates new potential for image restoration. Authors collect a dataset comprising 20 million high-resolution, high-quality images for model training, each enriched with descriptive text annotations. SUPIR provides the capability to restore images guided by textual prompts, broadening its application scope and potential', 'raw': 'Authors introduce SUPIR (Scaling-UP Image Restoration), a groundbreaking image restoration method that harnesses generative prior and the power of model scaling up. Leveraging multi-modal techniques and advanced generative prior, SUPIR marks a significant advance in intelligent and realistic image restoration. As a pivotal catalyst within SUPIR, model scaling dramatically enhances its capabilities and demonstrates new potential for image restoration. Authors collect a dataset comprising 20 million high-resolution, high-quality images for model training, each enriched with descriptive text annotations. 
SUPIR provides the capability to restore images guided by textual prompts, broadening its application scope and potential'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Complete Guide to SUPIR Enhancing and Upscaling Images Like in Sci-Fi Movies on Your PC : https://youtu.be/OYxVEvDf284 + +In this video, I explain how to 1 click install and use the most advanced image upscaler / enhancer in the world that is both commercially and open source available. The upscaler that I am going to introduce you is open source #SUPIR and the model is free to use. SUPIR upscaler is many times better than both paid Topaz AI and Magnific AI and you can use this upscaler on your computer for free forever. The difference of SUPIR vs #Topaz and #Magnific is like ages. So in this tutorial you are going to learn everything about how to install, update and use SUPIR upscaler on your personal computer. The video shows Windows but it works perfectly fine on Linux as well. + +Scripts Download Link ⤵️ +https://www.patreon.com/posts/99176057 + +Samplers and Text CFG (Text Guidance Scale) Comparison Link ⤵️ +https://imgsli.com/MjU2ODQz/2/1 + +How to install accurate Python, Git and FFmpeg on Windows Tutorial ⤵️ +https://youtu.be/-NjNy7afOQ0 + +Full DreamBooth / Fine-tuning Tutorial ⤵️ +https://youtu.be/0t5l6CP9eBg + +Scaling Up to Excellence: Practicing Model Scaling for Photo-Realistic Image Restoration In the Wild : https://arxiv.org/abs/2401.13627 + +Authors introduce SUPIR (Scaling-UP Image Restoration), a groundbreaking image restoration method that harnesses generative prior and the power of model scaling up. Leveraging multi-modal techniques and advanced generative prior, SUPIR marks a significant advance in intelligent and realistic image restoration. As a pivotal catalyst within SUPIR, model scaling dramatically enhances its capabilities and demonstrates new potential for image restoration. Authors collect a dataset comprising 20 million high-resolution, high-quality images for model training, each enriched with descriptive text annotations. 
SUPIR provides the capability to restore images guided by textual prompts, broadening its application scope and potential + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/8U9hqTJMrfYXegEiUKAKf.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/GSdIiTLtCde6PcUho0A_5.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/3nyjLGgk_Lx84FNDRjo30.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/2vOD-meTTjKEdTJmgyNJW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/sfLq1amHzyHwbWWmgWXI1.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/SG-ukXGVxgvv0t1YkOaZa.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/IpeQisCgGSju0fP_7A9gw.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/7gtS9HGqRCI5fej7xyfKk.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/tOFi7YkQXNdO14BuZUvtS.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/Yf8BIKb-QQkr3IrUYqDqQ.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/GcW3ikvB0ZQJXhUNWpZRs.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/S9vyUTKZmvxtB4uACfDC1.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/peSN9wGn6a8HhBRmncyx6.png'}]",[],"[{'reaction': '🔥', 'users': ['MonsterMMORPG', 'DmitryRyumin', 'splendiferousConiferous', 'sombaba', 'DonRichards', 'timothyupai', 'catastropiyush', 'vipulg', 'louisbrulenaudet'], 'count': 9}, {'reaction': '👍', 'users': ['MonsterMMORPG', 'distantquant', 'cocodark', 'StatsGary', 'dotdotgod', 'kevinpics', 'mtasic85'], 'count': 7}, {'reaction': '❤️', 'users': ['MonsterMMORPG', 'StatsGary'], 'count': 2}, {'reaction': '🤯', 'users': ['MonsterMMORPG', 'di-zhang-fdu'], 'count': 2}, {'reaction': '🚀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤗', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '😎', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '➕', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🧠', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '👀', 'users': ['MonsterMMORPG'], 'count': 1}, {'reaction': '🤝', 'users': ['MonsterMMORPG'], 'count': 1}]",2024-04-23 22:39:17,2024-05-23 19:55:12.803,"[{'_id': '656aeebe3e60cb262170bf4d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/xg3Ow02BGX1BoruDYaAr9.jpeg', 'fullname': 'Fabrice TIERCELIN', 'name': 'Fabrice-TIERCELIN', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23, 'isFollowing': False}, {'_id': '6345bd89fe134dfd7a0dba40', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg', 'fullname': 'Furkan Gözükara', 'name': 'MonsterMMORPG', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 646, 'isFollowing': False}]",/posts/MonsterMMORPG/283128537556650,4111,,9 
+https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,909179600473160,"[{'type': 'text', 'value': 'Phi-3 Technical Report', 'raw': 'Phi-3 Technical Report'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A Highly Capable Language Model Locally on Your Phone', 'raw': 'A Highly Capable Language Model Locally on Your Phone'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.14219'}, 'url': 'https://huggingface.co/papers/2404.14219', 'raw': 'https://huggingface.co/papers/2404.14219', 'label': 'Phi-3 Technical Report: A Highly Capable Language Model Locally on Your\n Phone (2404.14219)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We introduce phi-3-mini, a 3.8 billion parameter language model trained on 3.3 trillion tokens, whose overall performance, as measured by both academic benchmarks and internal testing, rivals that of models such as Mixtral 8x7B and GPT-3.5 (e.g., phi-3-mini achieves 69% on MMLU and 8.38 on MT-bench), despite being small enough to be deployed on a phone. The innovation lies entirely in our dataset for training, a scaled-up version of the one used for phi-2, composed of heavily filtered web data and synthetic data. The model is also further aligned for robustness, safety, and chat format. We also provide some initial parameter-scaling results with a 7B and 14B models trained for 4.8T tokens, called phi-3-small and phi-3-medium, both significantly more capable than phi-3-mini (e.g., respectively 75% and 78% on MMLU, and 8.7 and 8.9 on MT-bench).', 'raw': 'We introduce phi-3-mini, a 3.8 billion parameter language model trained on 3.3 trillion tokens, whose overall performance, as measured by both academic benchmarks and internal testing, rivals that of models such as Mixtral 8x7B and GPT-3.5 (e.g., phi-3-mini achieves 69% on MMLU and 8.38 on MT-bench), despite being small enough to be deployed on a phone. The innovation lies entirely in our dataset for training, a scaled-up version of the one used for phi-2, composed of heavily filtered web data and synthetic data. The model is also further aligned for robustness, safety, and chat format. We also provide some initial parameter-scaling results with a 7B and 14B models trained for 4.8T tokens, called phi-3-small and phi-3-medium, both significantly more capable than phi-3-mini (e.g., respectively 75% and 78% on MMLU, and 8.7 and 8.9 on MT-bench).'}, {'type': 'new_line', 'raw': '\n'}]","Phi-3 Technical Report + +A Highly Capable Language Model Locally on Your Phone + +https://huggingface.co/papers/2404.14219 + +We introduce phi-3-mini, a 3.8 billion parameter language model trained on 3.3 trillion tokens, whose overall performance, as measured by both academic benchmarks and internal testing, rivals that of models such as Mixtral 8x7B and GPT-3.5 (e.g., phi-3-mini achieves 69% on MMLU and 8.38 on MT-bench), despite being small enough to be deployed on a phone. The innovation lies entirely in our dataset for training, a scaled-up version of the one used for phi-2, composed of heavily filtered web data and synthetic data. The model is also further aligned for robustness, safety, and chat format. 
We also provide some initial parameter-scaling results with a 7B and 14B models trained for 4.8T tokens, called phi-3-small and phi-3-medium, both significantly more capable than phi-3-mini (e.g., respectively 75% and 78% on MMLU, and 8.7 and 8.9 on MT-bench). +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/tLvG6rLJM2at4bOcbfj3D.png'}]",[],"[{'reaction': '👀', 'users': ['himanshubeniwal', 'wchai', 'RahulSharma0', 'dotdotgod', 'AtAndDev'], 'count': 5}, {'reaction': '🤝', 'users': ['victor', 'wchai', 'AtAndDev'], 'count': 3}, {'reaction': '🔥', 'users': ['Dang', 'alielfilali01', 'AtAndDev'], 'count': 3}, {'reaction': '🚀', 'users': ['taufiqdp', 'AtAndDev'], 'count': 2}]",2024-04-23 21:25:53,2024-04-23 21:25:53.520,[],/posts/akhaliq/909179600473160,3515,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,434720589450701,"[{'type': 'text', 'value': 'Testing the Phi-3-mini 4k on HuggingChat. How well can it craft a tweet? ', 'raw': 'Testing the Phi-3-mini 4k on HuggingChat. How well can it craft a tweet? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Not bad at all: ', 'raw': 'Not bad at all: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'Excited to unveil phi-3-mini, a compact yet powerful 3.8B parameter model, outperforming giants like Mixtral & GPT-3.5 on benchmarks & safe for phones! *\n#Al #Phi3 #LanguageModel #Techinnovation #Phi3Miniml', 'raw': '```\nExcited to unveil phi-3-mini, a compact yet powerful 3.8B parameter model, outperforming giants like Mixtral & GPT-3.5 on benchmarks & safe for phones! *\n#Al #Phi3 #LanguageModel #Techinnovation #Phi3Miniml\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The models are here:', 'raw': 'The models are here:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Phi-3-Mini-4K-Instruct: ', 'raw': '- Phi-3-Mini-4K-Instruct: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Phi-3-mini-4k-instruct'}, 'url': 'https://huggingface.co/microsoft/Phi-3-mini-4k-instruct', 'raw': 'https://huggingface.co/microsoft/Phi-3-mini-4k-instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '-Phi-3-Mini-128K-Instruct: ', 'raw': '-Phi-3-Mini-128K-Instruct: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'microsoft/Phi-3-mini-128k-instruct'}, 'url': 'https://huggingface.co/microsoft/Phi-3-mini-128k-instruct', 'raw': 'https://huggingface.co/microsoft/Phi-3-mini-128k-instruct'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out in Hugging Chat: ', 'raw': 'Try it out in Hugging Chat: '}, {'type': 'link', 'href': 'https://huggingface.co/chat/models/microsoft/Phi-3-mini-4k-instruct', 'raw': 'https://huggingface.co/chat/models/microsoft/Phi-3-mini-4k-instruct'}]","Testing the Phi-3-mini 4k on HuggingChat. How well can it craft a tweet? + +Not bad at all: +``` +Excited to unveil phi-3-mini, a compact yet powerful 3.8B parameter model, outperforming giants like Mixtral & GPT-3.5 on benchmarks & safe for phones! 
* +#Al #Phi3 #LanguageModel #Techinnovation #Phi3Miniml +``` +The models are here: +- Phi-3-Mini-4K-Instruct: https://huggingface.co/microsoft/Phi-3-mini-4k-instruct +-Phi-3-Mini-128K-Instruct: https://huggingface.co/microsoft/Phi-3-mini-128k-instruct + +Try it out in Hugging Chat: https://huggingface.co/chat/models/microsoft/Phi-3-mini-4k-instruct","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/o50AInlRiNmtCt_En0DqG.mp4'}]",[],"[{'reaction': '🤗', 'users': ['zhou20120904'], 'count': 1}]",2024-04-23 19:10:11,2024-08-26 10:00:29.594,[],/posts/fdaudens/434720589450701,2765,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6380ebb8471a4550ff255c62/ZWdgPmHQBgvFNHnT65V6-.jpeg,85.0,Batuhan,isidentical,785172218215559,"[{'type': 'text', 'value': 'Happy to announce ', 'raw': 'Happy to announce '}, {'type': 'link', 'href': 'https://imgsys.org', 'raw': 'https://imgsys.org'}, {'type': 'text', 'value': ' -- a sister project to Chatbot Arena by lmsys -- for comparing different text guided image generation models. Try it natively on HuggingFace: ', 'raw': ' -- a sister project to Chatbot Arena by lmsys -- for comparing different text guided image generation models. Try it natively on HuggingFace: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/fal-ai/imgsys', 'raw': 'https://huggingface.co/spaces/fal-ai/imgsys'}]",Happy to announce https://imgsys.org -- a sister project to Chatbot Arena by lmsys -- for comparing different text guided image generation models. Try it natively on HuggingFace: https://huggingface.co/spaces/fal-ai/imgsys,[],[],"[{'reaction': '❤️', 'users': ['clefourrier', 'clem', 'julien-c', 'victor', 'radames', 'multimodalart', 'badayvedat', 'DmitryRyumin', 'seyf1elislam', 'lysandre', 'ArthurZ', 'cansa', 'KingNish'], 'count': 13}, {'reaction': '🔥', 'users': ['Dang'], 'count': 1}, {'reaction': '👍', 'users': ['ijohn07'], 'count': 1}]",2024-04-23 18:24:45,2024-04-23 18:41:45.934,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}]",/posts/isidentical/785172218215559,2132,,1 +/avatars/ca8ff74887bbf8eb3f5b04ae9bb6d05b.svg,10.0,Yanzuo Lu,oliveryanzuolu,715163802813981,"[{'type': 'text', 'value': 'Thrilled to introduce our Hyper-SD, offering hyper-fast⚡️ and hyper-quality✨ text-to-image generation.', 'raw': 'Thrilled to introduce our Hyper-SD, offering hyper-fast⚡️ and hyper-quality✨ text-to-image generation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our model achieves SINGLE-STEP inference on both SD1.5 and SDXL architecture without evident losses of aesthetics, styles and structures.', 'raw': 'Our model achieves SINGLE-STEP inference on both SD1.5 and SDXL architecture without evident losses of aesthetics, styles and structures.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Project page: ', 'raw': 'Project page: '}, {'type': 'link', 'href': 'https://hyper-sd.github.io', 'raw': 'https://hyper-sd.github.io'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'HuggingFace repo: ', 'raw': 'HuggingFace repo: '}, {'type': 'resource', 'resource': {'type': 'model', 'id':
'ByteDance/Hyper-SD'}, 'url': 'https://huggingface.co/ByteDance/Hyper-SD', 'raw': 'https://huggingface.co/ByteDance/Hyper-SD'}, {'type': 'new_line', 'raw': '\n'}]","Thrilled to introduce our Hyper-SD, offering hyper-fast⚡️ and hyper-quality✨ text-to-image generation. +Our model achieves SINGLE-STEP inference on both SD1.5 and SDXL architecture without evident losses of aesthetics, styles and structures. +Project page: https://hyper-sd.github.io +HuggingFace repo: https://huggingface.co/ByteDance/Hyper-SD +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6614cbd40bbea65e71db4e1f/x-SvCMyuMDdVDNLESZNeB.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6614cbd40bbea65e71db4e1f/go52WIVe0WReUmbIhimoj.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6614cbd40bbea65e71db4e1f/CTfz4V9NpLFHIwjipZtSB.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6614cbd40bbea65e71db4e1f/r7YLhs8M509AnztkkuBja.png'}]",[],"[{'reaction': '🔥', 'users': ['renyuxi', 'multimodalart', 'DmitryRyumin', 'Tonic', 'YaTharThShaRma999', 'radames', 'victor', 'alielfilali01'], 'count': 8}, {'reaction': '🚀', 'users': ['renyuxi', 'multimodalart', 'DmitryRyumin', 'Tonic', 'YaTharThShaRma999', 'radames', 'victor'], 'count': 7}]",2024-04-23 12:12:43,2024-04-29 07:35:46.981,"[{'_id': '6064e095abd8d3692e3e2ed6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1648966381588-6064e095abd8d3692e3e2ed6.jpeg', 'fullname': 'Radamés Ajna', 'name': 'radames', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2555, 'isFollowing': False}, {'_id': '6614cbd40bbea65e71db4e1f', 'avatarUrl': '/avatars/ca8ff74887bbf8eb3f5b04ae9bb6d05b.svg', 'fullname': 'Yanzuo Lu', 'name': 'oliveryanzuolu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10, 'isFollowing': False}]",/posts/oliveryanzuolu/715163802813981,2677,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg,211.0,Bram Vanroy,BramVanroy,929197065465455,"[{'type': 'text', 'value': '🥳 New license for datasets: Apache 2.0!', 'raw': '🥳 New license for datasets: Apache 2.0!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I have been struggling mentally for many months now with the OpenAI terms of use that indicate that their model outputs cannot be used to build ""competing models"". This leads to many questions:', 'raw': 'I have been struggling mentally for many months now with the OpenAI terms of use that indicate that their model outputs cannot be used to build ""competing models"". This leads to many questions:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- what is the definition of competing? Is it the same as ""commercial""?', 'raw': '- what is the definition of competing? 
Is it the same as ""commercial""?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- since this is part of the terms of use between OpenAI and the API user, can a third party still use the generated dataset to build competing models?', 'raw': '- since this is part of the terms of use between OpenAI and the API user, can a third party still use the generated dataset to build competing models?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- are such restrictions even legal in the first place?', 'raw': '- are such restrictions even legal in the first place?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Trying to ""follow the rules"" as much as possible despite wanting to be as open as possible, I kept releasing my datasets under non-commercial licenses (which are too restrictive anyhow - nothing should prevent you from using the data in non-LM commercial settings), just like models trained on these datasets. This has put me at a competitive disadvantage compared to creators who do not follow the same approach and release their data/models on apache 2.0 despite the OpenAI ""restrictions"". Moreover, I fear (', 'raw': 'Trying to ""follow the rules"" as much as possible despite wanting to be as open as possible, I kept releasing my datasets under non-commercial licenses (which are too restrictive anyhow - nothing should prevent you from using the data in non-LM commercial settings), just like models trained on these datasets. This has put me at a competitive disadvantage compared to creators who do not follow the same approach and release their data/models on apache 2.0 despite the OpenAI ""restrictions"". Moreover, I fear ('}, {'type': 'link', 'href': 'https://twitter.com/BramVanroy/status/1780220420316164246', 'raw': 'https://twitter.com/BramVanroy/status/1780220420316164246'}, {'type': 'text', 'value': ') that my approach blocks adaptation of my data/models for (commercial) applications/integrations.', 'raw': ') that my approach blocks adaptation of my data/models for (commercial) applications/integrations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thankfully ', 'raw': 'Thankfully '}, {'type': 'mention', 'user': 'Rijgersberg', 'raw': '@Rijgersberg'}, {'type': 'text', 'value': ' noted that these OpenAI terms of use are NOT explicit in the Azure OpenAI API (', 'raw': ' noted that these OpenAI terms of use are NOT explicit in the Azure OpenAI API ('}, {'type': 'link', 'href': 'https://twitter.com/E_Rijgersberg/status/1780308971762450725', 'raw': 'https://twitter.com/E_Rijgersberg/status/1780308971762450725'}, {'type': 'text', 'value': '). Since my latest datasets were created via Azure, this comes as a relief. As far as I can tell after digging through Azure docs, this allows me to change all recent GPT4-generated datasets to apache 2.0! 🥳 ', 'raw': '). Since my latest datasets were created via Azure, this comes as a relief. As far as I can tell after digging through Azure docs, this allows me to change all recent GPT4-generated datasets to apache 2.0! 
🥳 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/ultrachat_200k_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/ultrachat_200k_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/ultrachat_200k_dutch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/orca_dpo_pairs_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/orca_dpo_pairs_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/orca_dpo_pairs_dutch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/ultra_feedback_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/ultra_feedback_dutch_cleaned'}, 'url': 'https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch_cleaned', 'raw': 'https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch_cleaned'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- ', 'raw': '- '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/no_robots_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/no_robots_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/no_robots_dutch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I will have to mull over what I'll do for the older GPT3.5 datasets. What do you think that I should do?"", 'raw': ""I will have to mull over what I'll do for the older GPT3.5 datasets. What do you think that I should do?""}]","🥳 New license for datasets: Apache 2.0! + +I have been struggling mentally for many months now with the OpenAI terms of use that indicate that their model outputs cannot be used to build ""competing models"". This leads to many questions: + +- what is the definition of competing? Is it the same as ""commercial""? +- since this is part of the terms of use between OpenAI and the API user, can a third party still use the generated dataset to build competing models? +- are such restrictions even legal in the first place? + +Trying to ""follow the rules"" as much as possible despite wanting to be as open as possible, I kept releasing my datasets under non-commercial licenses (which are too restrictive anyhow - nothing should prevent you from using the data in non-LM commercial settings), just like models trained on these datasets. This has put me at a competitive disadvantage compared to creators who do not follow the same approach and release their data/models on apache 2.0 despite the OpenAI ""restrictions"". Moreover, I fear (https://twitter.com/BramVanroy/status/1780220420316164246) that my approach blocks adaptation of my data/models for (commercial) applications/integrations. + +Thankfully @Rijgersberg noted that these OpenAI terms of use are NOT explicit in the Azure OpenAI API (https://twitter.com/E_Rijgersberg/status/1780308971762450725). 
Since my latest datasets were created via Azure, this comes as a relief. As far as I can tell after digging through Azure docs, this allows me to change all recent GPT4-generated datasets to apache 2.0! 🥳 + +- https://huggingface.co/datasets/BramVanroy/ultrachat_200k_dutch +- https://huggingface.co/datasets/BramVanroy/orca_dpo_pairs_dutch +- https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch +- https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch_cleaned +- https://huggingface.co/datasets/BramVanroy/no_robots_dutch + +I will have to mull over what I'll do for the older GPT3.5 datasets. What do you think that I should do?",[],"[{'_id': '6319b164bc8f3b313f7a1db0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6319b164bc8f3b313f7a1db0/Hh0kuwsAnD2AOKdL6PpRs.png', 'fullname': 'Edwin Rijgersberg', 'name': 'Rijgersberg', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 55}]","[{'reaction': '🔥', 'users': ['stefan-it', 'JorgeDeC', 'adamo1139', 'alielfilali01', 'martineden'], 'count': 5}, {'reaction': '🚀', 'users': ['Rijgersberg', 'martineden'], 'count': 2}, {'reaction': '🤗', 'users': ['wvangils'], 'count': 1}]",2024-04-19 08:37:36,2024-05-13 18:13:06.098,"[{'_id': '625579238e2cfccdb434c5e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/625579238e2cfccdb434c5e1/WIZtyNNzeSMo3F5Xsr161.jpeg', 'fullname': 'Jorge De Corte', 'name': 'JorgeDeC', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}, {'_id': '5e1e17b6fcf41d740b6996a8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg', 'fullname': 'Bram Vanroy', 'name': 'BramVanroy', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 211, 'isFollowing': False}, {'_id': '6319b164bc8f3b313f7a1db0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6319b164bc8f3b313f7a1db0/Hh0kuwsAnD2AOKdL6PpRs.png', 'fullname': 'Edwin Rijgersberg', 'name': 'Rijgersberg', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 55, 'isFollowing': False}, {'_id': '62d1218684bfbee86b6ee521', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62d1218684bfbee86b6ee521/BpXX_XUP80IfdGAvbs_VI.png', 'fullname': 'MD', 'name': 'markding', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/BramVanroy/929197065465455,2307,,9 +https://cdn-avatars.huggingface.co/v1/production/uploads/5fa19f4ba13e063b8b2b5e11/nGVHdTYX2udnt-K8mqY27.jpeg,1494.0,Abhishek Thakur,abhishek,549518281460732,"[{'type': 'text', 'value': ""With AutoTrain, you can already finetune the latest llama3 models without writing a single line of code. Here's an example finetune of llama3 8b model: "", 'raw': ""With AutoTrain, you can already finetune the latest llama3 models without writing a single line of code. Here's an example finetune of llama3 8b model: ""}, {'type': 'link', 'href': 'https://huggingface.co/abhishek/autotrain-llama3-no-robots', 'raw': 'https://huggingface.co/abhishek/autotrain-llama3-no-robots'}]","With AutoTrain, you can already finetune the latest llama3 models without writing a single line of code. 
Here's an example finetune of llama3 8b model: https://huggingface.co/abhishek/autotrain-llama3-no-robots",[],[],"[{'reaction': '👀', 'users': ['tinycrops', 'not-lain', 'lunarflu', 'fdaudens', 'xianbao', 'HomerDoh', 'alielfilali01'], 'count': 7}, {'reaction': '🔥', 'users': ['lunarflu', 'xianbao', 'kramp', 'AtonMountlook', 'alielfilali01', 'anldrms'], 'count': 6}, {'reaction': '🚀', 'users': ['lunarflu', 'xianbao', 'alielfilali01'], 'count': 3}]",2024-04-18 17:40:15,2024-04-20 12:12:18.840,"[{'_id': '6612aedf09f16e7347dfa7e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg', 'fullname': 'Nishith Jain', 'name': 'KingNish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1222, 'isFollowing': False}, {'_id': '5fa19f4ba13e063b8b2b5e11', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5fa19f4ba13e063b8b2b5e11/nGVHdTYX2udnt-K8mqY27.jpeg', 'fullname': 'Abhishek Thakur', 'name': 'abhishek', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1494, 'isFollowing': False}]",/posts/abhishek/549518281460732,3485,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/647f36a8454af0237bd49574/jshkqBUTY-GZL8As8y6Aq.jpeg,759.0,Florent Daudens,fdaudens,628834201033253,"[{'type': 'text', 'value': 'Open-source AI on your phone? The HuggingChat app is out for iOS, with the best models: Command R, Zephyr Orpo, Mixtral, Gemma... ', 'raw': 'Open-source AI on your phone? The HuggingChat app is out for iOS, with the best models: Command R, Zephyr Orpo, Mixtral, Gemma... '}, {'type': 'link', 'href': 'https://apps.apple.com/ca/app/huggingchat/id6476778843?l=fr-CA', 'raw': 'https://apps.apple.com/ca/app/huggingchat/id6476778843?l=fr-CA'}]","Open-source AI on your phone? The HuggingChat app is out for iOS, with the best models: Command R, Zephyr Orpo, Mixtral, Gemma... 
https://apps.apple.com/ca/app/huggingchat/id6476778843?l=fr-CA","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/DsjNnKNFg1ElvuHIT40SN.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/647f36a8454af0237bd49574/5YTkVjO7F53GQfScxrZ6d.jpeg'}]",[],"[{'reaction': '😎', 'users': ['HansWuerst', 'lazarustda', 'BrigitteTousi', 'KingNish', 'samusenps', 'lunarflu', 'pcuenq', 'Nymbo'], 'count': 8}, {'reaction': '🚀', 'users': ['lunarflu', 'dankornas', 'nisten', 'pcuenq'], 'count': 4}, {'reaction': '🔥', 'users': ['lunarflu', 'jmattiello'], 'count': 2}, {'reaction': '🤯', 'users': ['lunarflu'], 'count': 1}]",2024-04-18 13:22:25,2024-04-19 01:12:56.752,"[{'_id': '6612aedf09f16e7347dfa7e1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6612aedf09f16e7347dfa7e1/bPYjBXCedY_1fSIPjoBTY.jpeg', 'fullname': 'Nishith Jain', 'name': 'KingNish', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1222, 'isFollowing': False}, {'_id': '65acc58c14d782df067f759b', 'avatarUrl': '/avatars/52a153d04d325469e1be69bce610ebe5.svg', 'fullname': 'Tan Hong Kai', 'name': 'ecyht2', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}]",/posts/fdaudens/628834201033253,4630,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1644340617257-noauth.png,641.0,Clémentine Fourrier,clefourrier,939076244724086,"[{'type': 'text', 'value': 'In basic chatbots, errors are annoyances. In medical LLMs, errors can have life-threatening consequences 🩸', 'raw': 'In basic chatbots, errors are annoyances. In medical LLMs, errors can have life-threatening consequences 🩸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's therefore vital to benchmark/follow advances in medical LLMs before even thinking about deployment."", 'raw': ""It's therefore vital to benchmark/follow advances in medical LLMs before even thinking about deployment.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is why a small research team introduced a medical LLM leaderboard, to get reproducible and comparable results between LLMs, and allow everyone to follow advances in the field.', 'raw': 'This is why a small research team introduced a medical LLM leaderboard, to get reproducible and comparable results between LLMs, and allow everyone to follow advances in the field.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'openlifescienceai/open_medical_llm_leaderboard'}, 'url': 'https://huggingface.co/spaces/openlifescienceai/open_medical_llm_leaderboard', 'raw': 'https://huggingface.co/spaces/openlifescienceai/open_medical_llm_leaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Congrats to ', 'raw': 'Congrats to '}, {'type': 'mention', 'user': 'aaditya', 'raw': '@aaditya'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'pminervini', 'raw': '@pminervini'}, {'type': 'text', 'value': ' !', 'raw': ' !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Learn more in the blog: ', 'raw': 'Learn more in the blog: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/leaderboard-medicalllm',
'raw': 'https://huggingface.co/blog/leaderboard-medicalllm'}]","In basic chatbots, errors are annoyances. In medical LLMs, errors can have life-threatening consequences 🩸 + +It's therefore vital to benchmark/follow advances in medical LLMs before even thinking about deployment. + +This is why a small research team introduced a medical LLM leaderboard, to get reproducible and comparable results between LLMs, and allow everyone to follow advances in the field. + +https://huggingface.co/spaces/openlifescienceai/open_medical_llm_leaderboard + +Congrats to @aaditya and @pminervini ! +Learn more in the blog: https://huggingface.co/blog/leaderboard-medicalllm",[],"[{'_id': '5f3fe13d79c1ba4c353d0c19', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f3fe13d79c1ba4c353d0c19/XswyGe3OtOdZ6g7rnrgfc.png', 'fullname': 'Ankit Pal', 'name': 'aaditya', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 290}, {'_id': '61001311e043e15c13412d30', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61001311e043e15c13412d30/6yAbTweYR16XtxMBEyOWl.png', 'fullname': 'Pasquale Minervini', 'name': 'pminervini', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 39}]","[{'reaction': '🔥', 'users': ['KonradSzafer', 'qgallouedec', 'BrigitteTousi', 'lunarflu', 'mariagrandury', 'MaziyarPanahi', 'mmhamdy', 'Ramikan-BR', 'UmutKerem'], 'count': 9}, {'reaction': '🚀', 'users': ['KonradSzafer', 'BrigitteTousi', 'lunarflu', 'fdaudens', 'MaziyarPanahi', 'mmhamdy', 'Ramikan-BR', 'UmutKerem'], 'count': 8}, {'reaction': '❤️', 'users': ['samusenps', 'lunarflu', 'nicoism', 'mariagrandury', 'MaziyarPanahi', 'Ramikan-BR', 'Pretergeek', 'UmutKerem'], 'count': 8}]",2024-04-18 12:59:32,2024-04-18 12:59:32.803,[],/posts/clefourrier/939076244724086,6147,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png,2266.0,Tom Aarsen,tomaarsen,476985886331959,"[{'type': 'text', 'value': '🚀 Sentence Transformers v2.7.0 is out! Featuring a new loss function, easier Matryoshka model inference & evaluation, CrossEncoder improvements & Intel Gaudi2 Accelerator support. Details:', 'raw': '🚀 Sentence Transformers v2.7.0 is out! Featuring a new loss function, easier Matryoshka model inference & evaluation, CrossEncoder improvements & Intel Gaudi2 Accelerator support. Details:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ A new loss function: CachedGISTEmbedLoss', 'raw': '1️⃣ A new loss function: CachedGISTEmbedLoss'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This loss function is a combination of CachedMultipleNegativesRankingLoss and the GISTEmbedLoss, both of which are already excellent. The caching mechanism allows for much higher batch sizes with constant memory usage, which boosts training performance. The GIST part introduces a guide model to guide the in-batch negative sample selection.
This prevents false negatives, resulting in a stronger training signal.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Automatic Matryoshka model truncation', 'raw': '2️⃣ Automatic Matryoshka model truncation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Matryoshka models produce embeddings that are still useful after truncation. However, this truncation always had to be done manually, until now! We've added a "", 'raw': ""Matryoshka models produce embeddings that are still useful after truncation. However, this truncation always had to be done manually, until now! We've added a ""}, {'type': 'inline_code', 'code': 'truncate_dim', 'raw': '`truncate_dim`'}, {'type': 'text', 'value': ' option to the Sentence Transformer constructor. This also allows truncation when using ', 'raw': ' option to the Sentence Transformer constructor. This also allows truncation when using '}, {'type': 'inline_code', 'code': 'HuggingFaceEmbeddings', 'raw': '`HuggingFaceEmbeddings`'}, {'type': 'text', 'value': ' from LlamaIndex or LangChain.', 'raw': ' from LlamaIndex or LangChain.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Additionally, you can now specify ', 'raw': '3️⃣ Additionally, you can now specify '}, {'type': 'inline_code', 'code': 'truncate_dim', 'raw': '`truncate_dim`'}, {'type': 'text', 'value': "" in evaluators to get the performance after truncation. (Hint: it's surprisingly good, even for models not trained with MatryoshkaLoss, and it can speed up e.g. clustering, retrieval, etc.)"", 'raw': "" in evaluators to get the performance after truncation. (Hint: it's surprisingly good, even for models not trained with MatryoshkaLoss, and it can speed up e.g. clustering, retrieval, etc.)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4️⃣ CrossEncoder improvements', 'raw': '4️⃣ CrossEncoder improvements'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The CrossEncoder now supports 'push_to_hub' to upload trained reranker models to Hugging Face. Additionally, CrossEncoders now support "", 'raw': ""The CrossEncoder now supports 'push_to_hub' to upload trained reranker models to Hugging Face. Additionally, CrossEncoders now support ""}, {'type': 'inline_code', 'code': 'trust_remote_code', 'raw': '`trust_remote_code`'}, {'type': 'text', 'value': ' to load models with custom modelling code.', 'raw': ' to load models with custom modelling code.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5️⃣ Inference on Intel Gaudi2', 'raw': '5️⃣ Inference on Intel Gaudi2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you have an Intel Gaudi2 Accelerator, Sentence Transformers now uses it automatically for even faster inference. No changes are necessary to your code, the device is automatically detected!', 'raw': 'If you have an Intel Gaudi2 Accelerator, Sentence Transformers now uses it automatically for even faster inference. 
No changes are necessary to your code, the device is automatically detected!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the release notes for all of the details: ', 'raw': 'Check out the release notes for all of the details: '}, {'type': 'link', 'href': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v2.7.0', 'raw': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v2.7.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'm very excited for the upcoming releases: I'm making great progress with a notable v3 refactor that should heavily improve the training process for embedding models!"", 'raw': ""I'm very excited for the upcoming releases: I'm making great progress with a notable v3 refactor that should heavily improve the training process for embedding models!""}]","🚀 Sentence Transformers v2.7.0 is out! Featuring a new loss function, easier Matryoshka model inference & evaluation, CrossEncoder improvements & Intel Gaudi2 Accelerator support. Details: + +1️⃣ A new loss function: CachedGISTEmbedLoss +This loss function is a combination of CachedMultipleNegativesRankingLoss and the GISTEmbedLoss, both of which are already excellent. The caching mechanism allows for much higher batch sizes with constant memory usage, which boosts training performance. The GIST part introduces a guide model to guide the in-batch negative sample selection. This prevents false negatives, resulting in a stronger training signal. + +2️⃣ Automatic Matryoshka model truncation +Matryoshka models produce embeddings that are still useful after truncation. However, this truncation always had to be done manually, until now! We've added a `truncate_dim` option to the Sentence Transformer constructor. This also allows truncation when using `HuggingFaceEmbeddings` from LlamaIndex or LangChain. + +3️⃣ Additionally, you can now specify `truncate_dim` in evaluators to get the performance after truncation. (Hint: it's surprisingly good, even for models not trained with MatryoshkaLoss, and it can speed up e.g. clustering, retrieval, etc.) + +4️⃣ CrossEncoder improvements +The CrossEncoder now supports 'push_to_hub' to upload trained reranker models to Hugging Face. Additionally, CrossEncoders now support `trust_remote_code` to load models with custom modelling code. + +5️⃣ Inference on Intel Gaudi2 +If you have an Intel Gaudi2 Accelerator, Sentence Transformers now uses it automatically for even faster inference. No changes are necessary to your code, the device is automatically detected! 
+ +Check out the release notes for all of the details: https://github.com/UKPLab/sentence-transformers/releases/tag/v2.7.0 + +I'm very excited for the upcoming releases: I'm making great progress with a notable v3 refactor that should heavily improve the training process for embedding models!",[],[],"[{'reaction': '🔥', 'users': ['ajibawa-2023', 'mdouglas', 'BrigitteTousi', 'lunarflu', 'mrdbourke', 'beomi', 'nickprock', 'alvarobartt', 'icpro', 'MichelleRuwen', 'not-lain', 'Ali-Khaled', 'adityakusupati', 'LeoLee23'], 'count': 14}]",2024-04-18 10:59:59,2024-04-25 00:32:25.106,"[{'_id': '6527e89a8808d80ccff88b7a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6527e89a8808d80ccff88b7a/CuGNmF1Et8KMQ0mCd1NEJ.jpeg', 'fullname': 'Hafedh Hichri', 'name': 'not-lain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2185, 'isFollowing': False}, {'_id': '660f051c77a1e2509aa23b19', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/Vz7ejW9--mwg9qnZHibPb.jpeg', 'fullname': 'Ali Khaled', 'name': 'Ali-Khaled', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/tomaarsen/476985886331959,3200,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1638698875017-noauth.jpeg,189.0,Violette,Violette,831339039064129,"[{'type': 'text', 'value': '🔥 Next Thursday 4/25 at 8am PT / 11am ET / 17h CET, join our live Hugging Cast to learn how to deploy open models on Google Cloud. ', 'raw': '🔥 Next Thursday 4/25 at 8am PT / 11am ET / 17h CET, join our live Hugging Cast to learn how to deploy open models on Google Cloud. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Register ➡️ ', 'raw': 'Register ➡️ '}, {'type': 'link', 'href': 'https://streamyard.com/watch/xz2nxp85Pi6e', 'raw': 'https://streamyard.com/watch/xz2nxp85Pi6e'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'philschmid', 'raw': '@philschmid'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'mention', 'user': 'tengomucho', 'raw': '@tengomucho'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'mention', 'user': 'jeffboudier', 'raw': '@jeffboudier'}, {'type': 'text', 'value': ' will show you brand new Hub integrations built with GCP ', 'raw': ' will show you brand new Hub integrations built with GCP '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥 with HF Inference Endpoints', 'raw': '🔥 with HF Inference Endpoints'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌎 with Vertex and GKE', 'raw': '🌎 with Vertex and GKE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 on TPU', 'raw': '🚀 on TPU'}]","🔥 Next Thursday 4/25 at 8am PT / 11am ET / 17h CET, join our live Hugging Cast to learn how to deploy open models on Google Cloud. 
+ +Register ➡️ https://streamyard.com/watch/xz2nxp85Pi6e + +@philschmid , @tengomucho , @jeffboudier will show you brand new Hub integrations built with GCP +🔥 with HF Inference Endpoints +🌎 with Vertex and GKE +🚀 on TPU",[],"[{'_id': '5fac18fb5eec0323e9470ba2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1605114051380-noauth.jpeg', 'fullname': 'Jeff Boudier', 'name': 'jeffboudier', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 315}, {'_id': '5ff5d596f244529b3ec0fb89', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1624629516652-5ff5d596f244529b3ec0fb89.png', 'fullname': 'Philipp Schmid', 'name': 'philschmid', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 873}, {'_id': '6311157564939fabc00c87ec', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6311157564939fabc00c87ec/eOaySRhZnV2maIUC_7HgC.jpeg', 'fullname': 'Alvaro Moran', 'name': 'tengomucho', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 25}]","[{'reaction': '🚀', 'users': ['ajibawa-2023', 'BrigitteTousi', 'lunarflu', 'not-lain', 'CYNIC78', 'n-e-w', 'Ryukijano'], 'count': 7}, {'reaction': '👍', 'users': ['Bkarine', 'lunarflu', 'not-lain', 'CYNIC78', 'Ryukijano'], 'count': 5}, {'reaction': '🔥', 'users': ['not-lain', 'CYNIC78', 'Ryukijano'], 'count': 3}]",2024-04-18 10:03:50,2024-04-18 10:03:50.729,[],/posts/Violette/831339039064129,3118,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e56829137cb5b49818287ea/8HYzJeRc4b9Wu7BfJwibS.png,448.0,Lee Junbum,beomi,277288382277555,"[{'type': 'text', 'value': '🚀 **InfiniTransformer, Gemma/Llama3 based Implementation!** 🌌', 'raw': '🚀 **InfiniTransformer, Gemma/Llama3 based Implementation!** 🌌'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Update @ 2024.04.19: It now supports Llama-3!', 'raw': '> Update @ 2024.04.19: It now supports Llama-3!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '> Note: this implementation is unofficial', 'raw': '> Note: this implementation is unofficial'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This implementation is designed to handle virtually infinite context lengths.', 'raw': 'This implementation is designed to handle virtually infinite context lengths.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Here's the github repo: "", 'raw': ""Here's the github repo: ""}, {'type': 'link', 'href': 'https://github.com/Beomi/InfiniTransformer', 'raw': 'https://github.com/Beomi/InfiniTransformer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 **Read the original Paper:** ', 'raw': '📄 **Read the original Paper:** '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2404.07143', 'raw': 'https://arxiv.org/abs/2404.07143'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## **Focus on Infini-Attention**', 'raw': '## **Focus on Infini-Attention**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **2 Types of Implementation available:** Attention-layer only implementation / Model & Train-wise implementation', 'raw': '- **2 Types of Implementation available:** Attention-layer only 
implementation / Model & Train-wise implementation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Fixed(segment dependent) Memory Usage:** Enables training on larger models and longer sequences without the memory overhead typical of standard Transformer implementations.', 'raw': '- **Fixed(segment dependent) Memory Usage:** Enables training on larger models and longer sequences without the memory overhead typical of standard Transformer implementations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- **Infinite Context Capability:** Train with unprecedented sequence lengths—imagine handling up to 1 million sequence lengths on standard hardware!', 'raw': '- **Infinite Context Capability:** Train with unprecedented sequence lengths—imagine handling up to 1 million sequence lengths on standard hardware!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - You could train Gemma-2B with 1M sequence length with 2K segmentation size with a single H100 GPU.', 'raw': ' - You could train Gemma-2B with 1M sequence length with 2K segmentation size with a single H100 GPU.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '## **Try InfiniTransformer**', 'raw': '## **Try InfiniTransformer**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. **Clone the repository:**', 'raw': '1. **Clone the repository:**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'inline_code', 'code': '', 'raw': '``'}, {'type': 'inline_code', 'code': 'bash\n git clone https://github.com/Beomi/InfiniTransformer\n ', 'raw': '`bash\n git clone https://github.com/Beomi/InfiniTransformer\n `'}, {'type': 'inline_code', 'code': '', 'raw': '``'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. **Install necessary tools:**', 'raw': '2. **Install necessary tools:**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'inline_code', 'code': '', 'raw': '``'}, {'type': 'inline_code', 'code': 'bash\n pip install -r requirements.txt\n pip install -e git+https://github.com/huggingface/transformers.git@b109257f4f#egg=transformers\n ', 'raw': '`bash\n pip install -r requirements.txt\n pip install -e git+https://github.com/huggingface/transformers.git@b109257f4f#egg=transformers\n `'}, {'type': 'inline_code', 'code': '', 'raw': '``'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. **Dive Deep into Custom Training:**', 'raw': '3. **Dive Deep into Custom Training:**'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Train with extensive sequence lengths using scripts such as ', 'raw': ' - Train with extensive sequence lengths using scripts such as '}, {'type': 'inline_code', 'code': './train.gemma.infini.noclm.1Mseq.sh', 'raw': '`./train.gemma.infini.noclm.1Mseq.sh`'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For more detailed info, please visit the repo: ', 'raw': 'For more detailed info, please visit the repo: '}, {'type': 'link', 'href': 'https://github.com/Beomi/InfiniTransformer', 'raw': 'https://github.com/Beomi/InfiniTransformer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Looking forward to your feedback! 😊', 'raw': 'Looking forward to your feedback! 
😊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ps. Training loss plot is here 😉', 'raw': 'ps. Training loss plot is here 😉'}]","🚀 **InfiniTransformer, Gemma/Llama3 based Implementation!** 🌌 + +> Update @ 2024.04.19: It now supports Llama-3! + +> Note: this implementation is unofficial + +This implementation is designed to handle virtually infinite context lengths. + +Here's the github repo: https://github.com/Beomi/InfiniTransformer + +📄 **Read the original Paper:** https://arxiv.org/abs/2404.07143 + +## **Focus on Infini-Attention** + +- **2 Types of Implementation available:** Attention-layer only implementation / Model & Train-wise implementation +- **Fixed(segment dependent) Memory Usage:** Enables training on larger models and longer sequences without the memory overhead typical of standard Transformer implementations. +- **Infinite Context Capability:** Train with unprecedented sequence lengths—imagine handling up to 1 million sequence lengths on standard hardware! + - You could train Gemma-2B with 1M sequence length with 2K segmentation size with a single H100 GPU. + +## **Try InfiniTransformer** + +1. **Clone the repository:** + ```bash + git clone https://github.com/Beomi/InfiniTransformer + ``` +2. **Install necessary tools:** + ```bash + pip install -r requirements.txt + pip install -e git+https://github.com/huggingface/transformers.git@b109257f4f#egg=transformers + ``` +3. **Dive Deep into Custom Training:** + - Train with extensive sequence lengths using scripts such as `./train.gemma.infini.noclm.1Mseq.sh`. + +For more detailed info, please visit the repo: https://github.com/Beomi/InfiniTransformer + +Looking forward to your feedback! 😊 + +ps. Training loss plot is here 😉","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e56829137cb5b49818287ea/xxpj9FbHSM3JRU47jKsNK.png'}]",[],"[{'reaction': '🚀', 'users': ['tomaarsen', 'maywell', 'lunarflu', 'hllj', 'Joseph717171', 'genne', 'HyeonjinXZ'], 'count': 7}, {'reaction': '🔥', 'users': ['flozi00', 'raidhon', 'Joseph717171', 'sosoai'], 'count': 4}, {'reaction': '👍', 'users': ['gangyeolkim', 'Joseph717171', 'nebchi'], 'count': 3}]",2024-04-18 09:16:53,2024-04-19 07:47:45.480,"[{'_id': '6317233cc92fd6fee317e030', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png', 'fullname': 'Tom Aarsen', 'name': 'tomaarsen', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2266, 'isFollowing': False}, {'_id': '5e56829137cb5b49818287ea', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e56829137cb5b49818287ea/8HYzJeRc4b9Wu7BfJwibS.png', 'fullname': 'Lee Junbum', 'name': 'beomi', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 448, 'isFollowing': False}]",/posts/beomi/277288382277555,12297,,2
https://cdn-avatars.huggingface.co/v1/production/uploads/1637676375949-noauth.jpeg,23.0,Carlos Rodríguez,crodri,967430752807152,"[{'type': 'text', 'value': 'Multilingual RAG optimized models and datasets available from the Language Technologies Unit @ the Barcelona Supercomputing Unit', 'raw': 'Multilingual RAG optimized models and datasets available from the Language Technologies Unit @ the Barcelona Supercomputing Unit'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We are releasing new RAG-optimized multilingual models and dataset, within the AINA 
project contributions:', 'raw': 'We are releasing new RAG-optimized multilingual models and dataset, within the AINA project contributions:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'projecte-aina/FlorRAG'}, 'url': 'https://huggingface.co/projecte-aina/FlorRAG', 'raw': 'https://huggingface.co/projecte-aina/FlorRAG'}, {'type': 'text', 'value': ' , based on our Bloom Flor6.3b model, capable of RAG in Catalan, Spanish and English', 'raw': ' , based on our Bloom Flor6.3b model, capable of RAG in Catalan, Spanish and English'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'projecte-aina/RAG_Multilingual'}, 'url': 'https://huggingface.co/datasets/projecte-aina/RAG_Multilingual', 'raw': 'https://huggingface.co/datasets/projecte-aina/RAG_Multilingual'}, {'type': 'text', 'value': ' , a 56K+ instructional dataset with human-like answers created from kernel-of-truth of extractive datasets using a Mixtral8x7b model', 'raw': ' , a 56K+ instructional dataset with human-like answers created from kernel-of-truth of extractive datasets using a Mixtral8x7b model'}]","Multilingual RAG optimized models and datasets available from the Language Technologies Unit @ the Barcelona Supercomputing Unit +We are releasing new RAG-optimized multilingual models and dataset, within the AINA project contributions: + +https://huggingface.co/projecte-aina/FlorRAG , based on our Bloom Flor6.3b model, capable of RAG in Catalan, Spanish and English + +https://huggingface.co/datasets/projecte-aina/RAG_Multilingual , a 56K+ instructional dataset with human-like answers created from kernel-of-truth of extractive datasets using a Mixtral8x7b model",[],[],"[{'reaction': '🔥', 'users': ['tomaarsen', 'lunarflu', 'ljaume'], 'count': 3}]",2024-04-18 08:56:59,2024-04-18 08:56:59.989,[],/posts/crodri/967430752807152,2409,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,597835329939130,"[{'type': 'text', 'value': ""Let's break down the technical details in Microsoft's mind-blowing Lifelike audio-driven talking faces framework - VASA and model VASA-1:"", 'raw': ""Let's break down the technical details in Microsoft's mind-blowing Lifelike audio-driven talking faces framework - VASA and model VASA-1:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Summary of Summaries', 'raw': 'Summary of Summaries'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The paper introduces VASA, a framework for generating lifelike talking faces with appealing visual affective skills (VAS) from a single image and speech audio.', 'raw': '- The paper introduces VASA, a framework for generating lifelike talking faces with appealing visual affective skills (VAS) from a single image and speech audio.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Core innovations include a diffusion-based model for holistic generation of facial dynamics and head movements in an expressive, disentangled face latent space developed using video data.', 'raw': '- Core innovations include a diffusion-based model for holistic generation of facial dynamics and head movements in an expressive, disentangled face latent space developed using video 
data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- VASA-1 generates high-quality 512x512 videos at up to 40 FPS with low latency.', 'raw': '- VASA-1 generates high-quality 512x512 videos at up to 40 FPS with low latency.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Supports real-time generation of lifelike, emotive talking faces.', 'raw': '- Supports real-time generation of lifelike, emotive talking faces.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Summary of Overall Framework:', 'raw': 'Summary of Overall Framework:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- VASA generates facial dynamics and head motion in latent space, conditioned on audio and other signals', 'raw': '- VASA generates facial dynamics and head motion in latent space, conditioned on audio and other signals'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Instead of directly generating video frames, it generates holistic facial dynamics and head motion in a latent space, conditioned on audio and optional signals.', 'raw': '- Instead of directly generating video frames, it generates holistic facial dynamics and head motion in a latent space, conditioned on audio and optional signals.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- To achieve this, the framework uses a face encoder-decoder to extract appearance and identity features and train a Diffusion Transformer model to generate motion latent codes.', 'raw': '- To achieve this, the framework uses a face encoder-decoder to extract appearance and identity features and train a Diffusion Transformer model to generate motion latent codes.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Technical Method Details:', 'raw': 'Technical Method Details:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Expressive and Disentangled Face Latent Space Construction:', 'raw': 'Expressive and Disentangled Face Latent Space Construction:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Based on 3D-AID face reenactment framework ', 'raw': ' - Based on 3D-AID face reenactment framework '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Decomposes face into 3D appearance volume, identity code, head pose, ', 'raw': ' - Decomposes face into 3D appearance volume, identity code, head pose, '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' and facial dynamics latents', 'raw': ' and facial dynamics latents'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Uses encoders to extract these latent factors from face images.', 'raw': ' - Uses encoders to extract these latent factors from face images.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Applies additional losses to improve disentanglement:', 'raw': ' - Applies additional losses to improve disentanglement:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Pairwise head pose and facial dynamics transfer loss', 'raw': ' - Pairwise head pose and facial dynamics transfer loss'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Face identity similarity loss for cross-identity pose/dynamics transfer', 'raw': ' - Face identity similarity loss for cross-identity pose/dynamics transfer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Holistic Facial 
Dynamics Generation with Diffusion Transformer:', 'raw': 'Holistic Facial Dynamics Generation with Diffusion Transformer:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Represents all facial movements (lip, expression, gaze, etc.) as a single ', 'raw': '- Represents all facial movements (lip, expression, gaze, etc.) as a single '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'latent sequence', 'raw': 'latent sequence'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Applies a Diffusion Transformer model to generate the facial dynamics sequence.', 'raw': ' - Applies a Diffusion Transformer model to generate the facial dynamics sequence.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Diffusion Transformer trained with simplified denoising score matching objective.', 'raw': '- Diffusion Transformer trained with simplified denoising score matching objective.'}]","Let's break down the technical details in Microsoft's mind-blowing Lifelike audio-driven talking faces framework - VASA and model VASA-1: + +Summary of Summaries +- The paper introduces VASA, a framework for generating lifelike talking faces with appealing visual affective skills (VAS) from a single image and speech audio. +- Core innovations include a diffusion-based model for holistic generation of facial dynamics and head movements in an expressive, disentangled face latent space developed using video data. +- VASA-1 generates high-quality 512x512 videos at up to 40 FPS with low latency. +- Supports real-time generation of lifelike, emotive talking faces. + +Summary of Overall Framework: +- VASA generates facial dynamics and head motion in latent space, conditioned on audio and other signals +- Instead of directly generating video frames, it generates holistic facial dynamics and head motion in a latent space, conditioned on audio and optional signals. +- To achieve this, the framework uses a face encoder-decoder to extract appearance and identity features and train a Diffusion Transformer model to generate motion latent codes. + +Technical Method Details: +Expressive and Disentangled Face Latent Space Construction: + - Based on 3D-AID face reenactment framework + - Decomposes face into 3D appearance volume, identity code, head pose, + and facial dynamics latents + - Uses encoders to extract these latent factors from face images. + - Applies additional losses to improve disentanglement: + - Pairwise head pose and facial dynamics transfer loss + - Face identity similarity loss for cross-identity pose/dynamics transfer + +Holistic Facial Dynamics Generation with Diffusion Transformer: +- Represents all facial movements (lip, expression, gaze, etc.) as a single +latent sequence + - Applies a Diffusion Transformer model to generate the facial dynamics sequence. 
+- Diffusion Transformer trained with simplified denoising score matching objective.","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/0fcSHVEcCrasx9gNXHVsX.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/F7BZ8ntU9lqugXy0G3bH8.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/8CAlRUfpQz24zuurXzg8g.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/l52Nb3cLkALPAs_fG7vej.png'}]",[],"[{'reaction': '👍', 'users': ['dashfunnydashdash', 'alvarlaigna', 'SandraToolan', 'pe65374', 'youngzoo', 'ajibawa-2023', 'MH0386', 'lunarflu', 'DmitryRyumin', 'VanshGehlot'], 'count': 10}, {'reaction': '👀', 'users': ['clem', 'cncqbaiying', 'Logge', 'victor', 'SandraToolan', 'lunarflu'], 'count': 6}, {'reaction': '🔥', 'users': ['DmitryRyumin', 'adityamallah', 'MortezaASL'], 'count': 3}, {'reaction': '😎', 'users': ['callmyname'], 'count': 1}]",2024-04-18 03:50:33,2024-04-20 18:43:53.102,"[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398, 'isFollowing': False}, {'_id': '6471d2075ffbc18f197a1e16', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6471d2075ffbc18f197a1e16/t1NA_KMUkYHKokuzfijLy.jpeg', 'fullname': 'ScottzModelz', 'name': 'ScottzModelz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6438a9027de34e8ea7e4b257', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg', 'fullname': 'Jaward Sesay', 'name': 'Jaward', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 331, 'isFollowing': False}, {'_id': '6351b955ef8786433eca3a3c', 'avatarUrl': '/avatars/9d6860a551de0d4912e08e64589921dc.svg', 'fullname': 'John Steward', 'name': 'HDiffusion', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '652c3351aba3aefb7dda0ba0', 'avatarUrl': '/avatars/7c206b757264e9149f34f433bc7f2e1f.svg', 'fullname': 'edwardsnowedin', 'name': 'vihangsharma', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/Jaward/597835329939130,3345,,12 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/zxdZvpuAP6qEhk3vyRO3_.jpeg,34.0,Zoltan Csaki,zolicsaki,821456421497684,"[{'type': 'text', 'value': 'We posted new SOTA SambaLingo 70B parameter models for Arabic, Thai and Hungarian! ', 'raw': 'We posted new SOTA SambaLingo 70B parameter models for Arabic, Thai and Hungarian! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the models here ', 'raw': 'Check out the models here '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'sambanovasystems/sambalingo-65e25770f2037c85ad35ca77'}, 'url': 'https://huggingface.co/collections/sambanovasystems/sambalingo-65e25770f2037c85ad35ca77', 'raw': 'https://huggingface.co/collections/sambanovasystems/sambalingo-65e25770f2037c85ad35ca77'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and our paper ', 'raw': 'and our paper '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://arxiv.org/abs/2404.05829', 'raw': 'https://arxiv.org/abs/2404.05829'}, {'type': 'new_line', 'raw': '\n'}]","We posted new SOTA SambaLingo 70B parameter models for Arabic, Thai and Hungarian! + +Check out the models here https://huggingface.co/collections/sambanovasystems/sambalingo-65e25770f2037c85ad35ca77 + +and our paper +https://arxiv.org/abs/2404.05829 +",[],[],"[{'reaction': '🚀', 'users': ['Dlbk', 'boapps', 'Violette', 'zolicsaki', 'lunarflu', 'Axilex', 'abidlabs', 'franchb', 'clem', 'ChangranHuuu', 'jlli'], 'count': 11}, {'reaction': '❤️', 'users': ['lunarflu', 'Axilex', 'abidlabs', 'franchb', 'ChangranHuuu'], 'count': 5}, {'reaction': '🤗', 'users': ['lunarflu', 'ChangranHuuu'], 'count': 2}, {'reaction': '🔥', 'users': ['zsolx2'], 'count': 1}]",2024-04-17 21:58:01,2024-04-18 16:39:35.944,[],/posts/zolicsaki/821456421497684,2807,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62611fcabbcbd1c34f1615f6/tXx0cbsnQM03EinbKMY0x.jpeg,1095.0,Yoach Lacombe,ylacombe,419348206292516,"[{'type': 'text', 'value': 'Yesterday, we released Parler-TTS and Data-Speech, fully open-source reproduction of work from the paper: ', 'raw': 'Yesterday, we released Parler-TTS and Data-Speech, fully open-source reproduction of work from the paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.01912'}, 'url': 'https://huggingface.co/papers/2402.01912', 'raw': 'https://huggingface.co/papers/2402.01912', 'label': 'Natural language guidance of high-fidelity text-to-speech with synthetic\n annotations (2402.01912)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Parler-TTS is a lightweight text-to-speech (TTS) model that can generate high-quality, natural sounding speech in the style of a given speaker (gender, pitch, speaking style, etc).', 'raw': 'Parler-TTS is a lightweight text-to-speech (TTS) model that can generate high-quality, natural sounding speech in the style of a given speaker (gender, pitch, speaking style, etc).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/collections/parler-tts/parler-tts-fully-open-source-high-quality-tts-models-66164ad285ba03e8ffde214c', 'raw': 'https://huggingface.co/collections/parler-tts/parler-tts-fully-open-source-high-quality-tts-models-66164ad285ba03e8ffde214c'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Parler-TTS Mini v0.1, is the first iteration Parler-TTS model trained using 10k hours of narrated audiobooks. It generates high-quality speech with features that can be controlled using a simple text prompt (e.g. 
gender, background noise, speaking rate, pitch and reverberation).', 'raw': 'Parler-TTS Mini v0.1, is the first iteration Parler-TTS model trained using 10k hours of narrated audiobooks. It generates high-quality speech with features that can be controlled using a simple text prompt (e.g. gender, background noise, speaking rate, pitch and reverberation).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""To improve the prosody and naturalness of the speech further, we're scaling up the amount of training data to 50k hours of speech. The v1 release of the model will be trained on this data, as well as inference optimisations, such as flash attention and torch compile."", 'raw': ""To improve the prosody and naturalness of the speech further, we're scaling up the amount of training data to 50k hours of speech. The v1 release of the model will be trained on this data, as well as inference optimisations, such as flash attention and torch compile.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'parler-tts/parler_tts_mini_v0.1'}, 'url': 'https://huggingface.co/parler-tts/parler_tts_mini_v0.1', 'raw': 'https://huggingface.co/parler-tts/parler_tts_mini_v0.1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Data-Speech can be used for annotating speech characteristics in a large-scale setting.', 'raw': 'Data-Speech can be used for annotating speech characteristics in a large-scale setting.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'parler-tts/open-source-speech-datasets-annotated-using-data-speech-661648ffa0d3d76bfa23d534'}, 'url': 'https://huggingface.co/collections/parler-tts/open-source-speech-datasets-annotated-using-data-speech-661648ffa0d3d76bfa23d534', 'raw': 'https://huggingface.co/collections/parler-tts/open-source-speech-datasets-annotated-using-data-speech-661648ffa0d3d76bfa23d534'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This work is both scalable and easily modifiable and will hopefully help the TTS research community explore new ways of conditioning speech synthesis.', 'raw': 'This work is both scalable and easily modifiable and will hopefully help the TTS research community explore new ways of conditioning speech synthesis.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All of the datasets, pre-processing, training code and weights are released publicly under permissive license, enabling the community to build on our work and develop their own powerful TTS models.', 'raw': 'All of the datasets, pre-processing, training code and weights are released publicly under permissive license, enabling the community to build on our work and develop their own powerful TTS models.'}, {'type': 'new_line', 'raw': '\n'}]","Yesterday, we released Parler-TTS and Data-Speech, fully open-source reproduction of work from the paper: https://huggingface.co/papers/2402.01912 + +Parler-TTS is a lightweight text-to-speech (TTS) model that can generate high-quality, natural sounding speech in the style of a given speaker (gender, pitch, speaking style, etc). 
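For a quick feel of the API, here is a minimal generation sketch along the lines of the repository's README usage (the description text is just an example, and argument names may differ slightly across versions):

```python
import soundfile as sf
from parler_tts import ParlerTTSForConditionalGeneration
from transformers import AutoTokenizer

model = ParlerTTSForConditionalGeneration.from_pretrained("parler-tts/parler_tts_mini_v0.1")
tokenizer = AutoTokenizer.from_pretrained("parler-tts/parler_tts_mini_v0.1")

prompt = "Hey, how are you doing today?"
description = "A female speaker with a slightly low-pitched voice delivers her words quite expressively."

# The description conditions the voice characteristics; the prompt is the text to speak.
input_ids = tokenizer(description, return_tensors="pt").input_ids
prompt_input_ids = tokenizer(prompt, return_tensors="pt").input_ids

generation = model.generate(input_ids=input_ids, prompt_input_ids=prompt_input_ids)
sf.write("parler_tts_out.wav", generation.cpu().numpy().squeeze(), model.config.sampling_rate)
```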
+ +https://huggingface.co/collections/parler-tts/parler-tts-fully-open-source-high-quality-tts-models-66164ad285ba03e8ffde214c + +Parler-TTS Mini v0.1, is the first iteration Parler-TTS model trained using 10k hours of narrated audiobooks. It generates high-quality speech with features that can be controlled using a simple text prompt (e.g. gender, background noise, speaking rate, pitch and reverberation). + +To improve the prosody and naturalness of the speech further, we're scaling up the amount of training data to 50k hours of speech. The v1 release of the model will be trained on this data, as well as inference optimisations, such as flash attention and torch compile. + +https://huggingface.co/parler-tts/parler_tts_mini_v0.1 + +Data-Speech can be used for annotating speech characteristics in a large-scale setting. + +https://huggingface.co/collections/parler-tts/open-source-speech-datasets-annotated-using-data-speech-661648ffa0d3d76bfa23d534 + +This work is both scalable and easily modifiable and will hopefully help the TTS research community explore new ways of conditionning speech synthesis. + +All of the datasets, pre-processing, training code and weights are released publicly under permissive license, enabling the community to build on our work and develop their own powerful TTS models. +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62611fcabbcbd1c34f1615f6/waoP_QoRLhv-NkzYdueIY.mp4'}]",[],"[{'reaction': '🔥', 'users': ['sanchit-gandhi', 'reach-vb', 'thomwolf', 'DmitryRyumin', 'osanseviero', 'clem', 'Lyte', 'maxugly', 'sroman3', 'blanchon', 'finefin', 'erkhem-gantulga', 'vedadom', 'microdiploma'], 'count': 14}, {'reaction': '❤️', 'users': ['reach-vb', 'thomwolf', 'samusenps', 'osanseviero', 'clem', 'maxugly', 'vedadom'], 'count': 7}, {'reaction': '🤯', 'users': ['reach-vb', 'thomwolf', 'osanseviero', 'clem', 'maxugly', 'vedadom'], 'count': 6}, {'reaction': '👍', 'users': ['wsuff', 'maxugly', 'abdouaziiz', 'vedadom', 'Pis-py', 'swaggerfei'], 'count': 6}, {'reaction': '👀', 'users': ['reach-vb', 'clem', 'maxugly'], 'count': 3}]",2024-04-11 09:59:56,2025-03-13 20:05:23.431,"[{'_id': '66ce52dc047657a30c2d014a', 'avatarUrl': '/avatars/52b1caa513c661e00b5c083599537156.svg', 'fullname': 'sri_parameshananda', 'name': 'sriparamesha', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/ylacombe/419348206292516,7969,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/60cd486d723acf5eb46fe8d3/Z1bD1kjvZ0QAOjZna41Xr.jpeg,62.0,Waseem AlShikh,wassemgtk,204283859077004,"[{'type': 'text', 'value': 'Writer team had the opportunity to run an eval for Mixtral-8x22b, results were interesting.', 'raw': 'Writer team had the opportunity to run an eval for Mixtral-8x22b, results were interesting.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| ---------------------------- |', 'raw': '| ---------------------------- |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| #mmlu 77.26 |', 'raw': '| #mmlu 77.26 |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| ---------------------------- |', 'raw': '| ---------------------------- |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| #hellaswag 88.81 |', 'raw': '| #hellaswag 88.81 |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| ---------------------------- |', 'raw': '| 
---------------------------- |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| #truthfulqa 52.05 |', 'raw': '| #truthfulqa 52.05 |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| ---------------------------- |', 'raw': '| ---------------------------- |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| #arc_challenge 70.31 |', 'raw': '| #arc_challenge 70.31 |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| ---------------------------- |', 'raw': '| ---------------------------- |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| #winogrande 84.93 | ', 'raw': '| #winogrande 84.93 | '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| ---------------------------- |', 'raw': '| ---------------------------- |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| #gsm8k 76.65 |', 'raw': '| #gsm8k 76.65 |'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '| ---------------------------- |', 'raw': '| ---------------------------- |'}]","Writer team had the opportunity to run an eval for Mixtral-8x22b, results were interesting. + +| ---------------------------- | +| #mmlu 77.26 | +| ---------------------------- | +| #hellaswag 88.81 | +| ---------------------------- | +| #truthfulqa 52.05 | +| ---------------------------- | +| #arc_challenge 70.31 | +| ---------------------------- | +| #winogrande 84.93 | +| ---------------------------- | +| #gsm8k 76.65 | +| ---------------------------- |","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60cd486d723acf5eb46fe8d3/3uzTjarADCM_X435pFvQ6.png'}]",[],"[{'reaction': '🔥', 'users': ['den0620', 'kur4i', 'osanseviero', 'clem', 'Jason233', 'wannaphong', 'firstrow', 'ChuxiJ', 'nbroad'], 'count': 9}]",2024-04-10 15:43:07,2024-05-08 16:03:28.194,"[{'_id': '657eb5b256c9c67605a6e8b5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/657eb5b256c9c67605a6e8b5/RPblnGJX57oiIcASEz_S8.png', 'fullname': 'raincandy_U', 'name': 'raincandy-u', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 37, 'isFollowing': False}]",/posts/wassemgtk/204283859077004,3646,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/62e54f0eae9d3f10acb95cb9/VAyk05hqB3OZWXEZW-B0q.png,2157.0,mrfakename,mrfakename,985266791506817,"[{'type': 'text', 'value': ""Mistral AI recently released a new Mixtral model. It's another Mixture of Experts model with 8 experts, each with 22B parameters. It requires over 200GB of VRAM to run in float16, and over 70GB of VRAM to run in int4. However, individuals have been successful at finetuning it on Apple Silicon laptops using the MLX framework. It features a 64K context window, twice that of their previous models (32K)."", 'raw': ""Mistral AI recently released a new Mixtral model. It's another Mixture of Experts model with 8 experts, each with 22B parameters. It requires over 200GB of VRAM to run in float16, and over 70GB of VRAM to run in int4. However, individuals have been successful at finetuning it on Apple Silicon laptops using the MLX framework. It features a 64K context window, twice that of their previous models (32K).""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The model was released over torrent, a method Mistral has recently often used for their releases. 
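For a rough sense of where those VRAM figures come from, here is a back-of-the-envelope sketch; the ~141B total parameter count is an assumption based on the eight experts sharing the non-expert (attention) weights rather than being eight fully independent 22B models:

```python
# Rough weight-memory estimate for Mixtral-8x22B (a sketch under the ~141B assumption).
total_params = 141e9
for dtype, bytes_per_param in {"float16": 2.0, "int4": 0.5}.items():
    gib = total_params * bytes_per_param / 1024**3
    print(f"{dtype}: ~{gib:.0f} GiB for the weights alone")
# float16: ~263 GiB, matching the "over 200GB" figure; int4: ~66 GiB, which
# lands near the quoted 70GB once activations and runtime overhead are added.
```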
While the license has not been confirmed yet, a moderator on their Discord server yesterday suggested it was Apache 2.0 licensed.', 'raw': 'The model was released over torrent, a method Mistral has recently often used for their releases. While the license has not been confirmed yet, a moderator on their Discord server yesterday suggested it was Apache 2.0 licensed.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sources:', 'raw': 'Sources:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• ', 'raw': '• '}, {'type': 'link', 'href': 'https://twitter.com/_philschmid/status/1778051363554934874', 'raw': 'https://twitter.com/_philschmid/status/1778051363554934874'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '• ', 'raw': '• '}, {'type': 'link', 'href': 'https://twitter.com/reach_vb/status/1777946948617605384', 'raw': 'https://twitter.com/reach_vb/status/1777946948617605384'}]","Mistral AI recently released a new Mixtral model. It's another Mixture of Experts model with 8 experts, each with 22B parameters. It requires over 200GB of VRAM to run in float16, and over 70GB of VRAM to run in int4. However, individuals have been successful at finetuning it on Apple Silicon laptops using the MLX framework. It features a 64K context window, twice that of their previous models (32K). + +The model was released over torrent, a method Mistral has recently often used for their releases. While the license has not been confirmed yet, a moderator on their Discord server yesterday suggested it was Apache 2.0 licensed. + +Sources: +• https://twitter.com/_philschmid/status/1778051363554934874 +• https://twitter.com/reach_vb/status/1777946948617605384",[],[],"[{'reaction': '👀', 'users': ['osanseviero', 'lewiswatson', 'tansutt', 'victor', 'ajibawa-2023', 'samusenps', 'antiven0m', 'Diegg', 'tcml', 'simonweniger', 'LucienL', 'marz1', 'clem'], 'count': 13}, {'reaction': '🚀', 'users': ['alobnayis', 'simonweniger', 'clem', 'Sshubam'], 'count': 4}, {'reaction': '😎', 'users': ['samusenps', 'clem'], 'count': 2}]",2024-04-10 15:01:08,2024-04-10 18:49:36.691,"[{'_id': '6538119803519fddb4a17e10', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}]",/posts/mrfakename/985266791506817,4083,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png,3221.0,Omar Sanseviero,osanseviero,711024068071505,"[{'type': 'text', 'value': 'Diaries of Open Source. Part 14 🤗', 'raw': 'Diaries of Open Source. 
Part 14 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥CohereForAI releases Command R+, an open 104B model with:', 'raw': '🔥CohereForAI releases Command R+, an open 104B model with:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Tool usage capabilities', 'raw': '- Tool usage capabilities'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Specialized in RAGs', 'raw': '- Specialized in RAGs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Multilingual', 'raw': '- Multilingual'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's one of the first models to surpass GPT-4 in the lmsys arena, check it out!"", 'raw': ""It's one of the first models to surpass GPT-4 in the lmsys arena, check it out!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'link', 'href': 'https://hf.co/CohereForAI/c4ai-command-r-plus', 'raw': 'https://hf.co/CohereForAI/c4ai-command-r-plus'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Official demo: ', 'raw': 'Official demo: '}, {'type': 'link', 'href': 'https://hf.co/spaces/CohereForAI/c4ai-command-r-plus', 'raw': 'https://hf.co/spaces/CohereForAI/c4ai-command-r-plus'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Quantized: ', 'raw': 'Quantized: '}, {'type': 'link', 'href': 'https://hf.co/CohereForAI/c4ai-command-r-plus-4bit', 'raw': 'https://hf.co/CohereForAI/c4ai-command-r-plus-4bit'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎉Google releases a new version of their Gemma instruct models, with improved quality, nicer to converse, and a fancier RL algorithm. The model is similar to Llama 2 70B in the Chat Arena!', 'raw': '🎉Google releases a new version of their Gemma instruct models, with improved quality, nicer to converse, and a fancier RL algorithm. 
The model is similar to Llama 2 70B in the Chat Arena!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Models: ', 'raw': 'Models: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'google/gemma-release-65d5efbccdbb8c4202ec078b'}, 'url': 'https://hf.co/collections/google/gemma-release-65d5efbccdbb8c4202ec078b', 'raw': 'https://hf.co/collections/google/gemma-release-65d5efbccdbb8c4202ec078b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out in HuggingChat ', 'raw': 'Try it out in HuggingChat '}, {'type': 'link', 'href': 'https://hf.co/chat/models/google/gemma-1.1-7b-it', 'raw': 'https://hf.co/chat/models/google/gemma-1.1-7b-it'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🪄VoiceCraft, a speech editing and TTS SOTA open model', 'raw': '🪄VoiceCraft, a speech editing and TTS SOTA open model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.16973'}, 'url': 'https://hf.co/papers/2403.16973', 'raw': 'https://hf.co/papers/2403.16973', 'label': 'VoiceCraft: Zero-Shot Speech Editing and Text-to-Speech in the Wild (2403.16973)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'pyp1/VoiceCraft'}, 'url': 'https://hf.co/pyp1/VoiceCraft', 'raw': 'https://hf.co/pyp1/VoiceCraft'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻Google released CodeGemma, a family of code generation, completion, and chat models', 'raw': '💻Google released CodeGemma, a family of code generation, completion, and chat models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog post: ', 'raw': 'Blog post: '}, {'type': 'link', 'href': 'https://hf.co/blog/codegemma', 'raw': 'https://hf.co/blog/codegemma'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Models: ', 'raw': 'Models: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'google/codegemma-release-66152ac7b683e2667abdee11'}, 'url': 'https://hf.co/collections/google/codegemma-release-66152ac7b683e2667abdee11', 'raw': 'https://hf.co/collections/google/codegemma-release-66152ac7b683e2667abdee11'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Report: ', 'raw': 'Report: '}, {'type': 'link', 'href': 'https://storage.googleapis.com/deepmind-media/gemma/codegemma_report.pdf', 'raw': 'https://storage.googleapis.com/deepmind-media/gemma/codegemma_report.pdf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Misc models:', 'raw': 'Misc models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🦖T-Rex2, a very powerful object detection model for many applications ', 'raw': '🦖T-Rex2, a very powerful object detection model for many applications '}, {'type': 'link', 'href': 'https://github.com/IDEA-Research/T-Rex', 'raw': 'https://github.com/IDEA-Research/T-Rex'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👀 CT-RATE : A 3D dataset paired with text reports ', 'raw': '👀 CT-RATE : A 3D dataset paired with text reports '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'ibrahimhamamci/CT-RATE'}, 'url': 'https://hf.co/datasets/ibrahimhamamci/CT-RATE', 'raw': 'https://hf.co/datasets/ibrahimhamamci/CT-RATE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐙Octopus 
v2: a Gemma-based model trained for Android API - extremely fast, better than Llama+RAG, great results ', 'raw': '🐙Octopus v2: a Gemma-based model trained for Android API - extremely fast, better than Llama+RAG, great results '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'NexaAIDev/Octopus-v2'}, 'url': 'https://hf.co/NexaAIDev/Octopus-v2', 'raw': 'https://hf.co/NexaAIDev/Octopus-v2'}]","Diaries of Open Source. Part 14 🤗 + +🔥CohereForAI releases Command R+, an open 104B model with: +- Tool usage capabilities +- Specialized in RAGs +- Multilingual +It's one of the first models to surpass GPT-4 in the lmsys arena, check it out! +Model: https://hf.co/CohereForAI/c4ai-command-r-plus +Official demo: https://hf.co/spaces/CohereForAI/c4ai-command-r-plus +Quantized: https://hf.co/CohereForAI/c4ai-command-r-plus-4bit + +🎉Google releases a new version of their Gemma instruct models, with improved quality, nicer to converse, and a fancier RL algorithm. The model is similar to Llama 2 70B in the Chat Arena! +Models: https://hf.co/collections/google/gemma-release-65d5efbccdbb8c4202ec078b +Try it out in HuggingChat https://hf.co/chat/models/google/gemma-1.1-7b-it + +🪄VoiceCraft, a speech editing and TTS SOTA open model +Paper: https://hf.co/papers/2403.16973 +Model: https://hf.co/pyp1/VoiceCraft + +💻Google released CodeGemma, a family of code generation, completion, and chat models +Blog post: https://hf.co/blog/codegemma +Models: https://hf.co/collections/google/codegemma-release-66152ac7b683e2667abdee11 +Report: https://storage.googleapis.com/deepmind-media/gemma/codegemma_report.pdf + +Misc models: +🦖T-Rex2, a very powerful object detection model for many applications https://github.com/IDEA-Research/T-Rex +👀 CT-RATE : A 3D dataset paired with text reports https://hf.co/datasets/ibrahimhamamci/CT-RATE +🐙Octopus v2: a Gemma-based model trained for Android API - extremely fast, better than Llama+RAG, great results https://hf.co/NexaAIDev/Octopus-v2",[],[],"[{'reaction': '🔥', 'users': ['qnguyen3', 'YaTharThShaRma999', 'samusenps', 'danielus', 'linoyts', 'simonweniger', 'clem', 'megrisdal', 'ashwinlimaye', 'USM-Valor', 'merve'], 'count': 11}]",2024-04-10 14:49:15,2024-04-15 13:04:17.425,"[{'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '638ef4c492f6fc2cd27590fb', 'avatarUrl': '/avatars/6f6823095d3073e80aa2a76657938717.svg', 'fullname': 'hennie', 'name': 'bonydork', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/osanseviero/711024068071505,11314,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,180992844061312,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Context versus Prior Knowledge in Language Models by "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Context versus Prior Knowledge in Language Models by ""}, {'type': 'mention', 'user': 'kdu4108', 'raw': '@kdu4108'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'vesteinn', 'raw': '@vesteinn'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'niklasstoehr', 'raw': '@niklasstoehr'}, 
{'type': 'text', 'value': ' J. C. White A. Schein ', 'raw': ' J. C. White A. Schein '}, {'type': 'mention', 'user': 'rcotterell', 'raw': '@rcotterell'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This work examines the influence of context versus memorized knowledge in LMs through the lens of the shift caused by contexts at various degrees of informativeness to the models' predictive distribution. Understanding this difference is especially important in the context of knowledge conflicts between memorized and contextual information."", 'raw': ""This work examines the influence of context versus memorized knowledge in LMs through the lens of the shift caused by contexts at various degrees of informativeness to the models' predictive distribution. Understanding this difference is especially important in the context of knowledge conflicts between memorized and contextual information.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Authors propose disentangling context influence in terms of ""persuasion"", i.e. how impactful is the inclusion of the context for answers of a given query/entity pair, and ""susceptibility"", i.e. how much answers of a given query/entity pair are likely to be swayed by the presence of context, and operationalize these concepts using information-theoretic measures akin to mutual information.', 'raw': 'Authors propose disentangling context influence in terms of ""persuasion"", i.e. how impactful is the inclusion of the context for answers of a given query/entity pair, and ""susceptibility"", i.e. how much answers of a given query/entity pair are likely to be swayed by the presence of context, and operationalize these concepts using information-theoretic measures akin to mutual information.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The two metrics are validated using a synthetic dataset sourced from a knowledge graph. Analysis shows that:', 'raw': 'The two metrics are validated using a synthetic dataset sourced from a knowledge graph. Analysis shows that:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The degree of persuasiveness of relevant contexts increases with the increase of model size (interesting implications here for the jailbreaking of LLMs!)', 'raw': '- The degree of persuasiveness of relevant contexts increases with the increase of model size (interesting implications here for the jailbreaking of LLMs!)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Assertive contexts tend to be more persuasive for closed queries (yes/no) and mid-sized models', 'raw': '- Assertive contexts tend to be more persuasive for closed queries (yes/no) and mid-sized models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Negation affects context persuasiveness', 'raw': '- Negation affects context persuasiveness'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Familiar entities (explored as real vs. 
fake, more frequent in training data and more connected in the KG) are less susceptible to context influence'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Finally, authors suggest applications of the persuasion/susceptibility framing for social science analyses and gender bias evaluation.', 'raw': 'Finally, authors suggest applications of the persuasion/susceptibility framing for social science analyses and gender bias evaluation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻 Code: ', 'raw': '💻 Code: '}, {'type': 'link', 'href': 'https://github.com/kdu4108/measureLM', 'raw': 'https://github.com/kdu4108/measureLM'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.04633'}, 'url': 'https://huggingface.co/papers/2404.04633', 'raw': 'https://huggingface.co/papers/2404.04633', 'label': 'Context versus Prior Knowledge in Language Models (2404.04633)'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 All daily picks: ', 'raw': '🔍 All daily picks: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-ofc-lms-65ae3339949c5675d25de2f9', 'raw': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-ofc-lms-65ae3339949c5675d25de2f9'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Context versus Prior Knowledge in Language Models by @kdu4108 @vesteinn @niklasstoehr J. C. White A. Schein @rcotterell + +This work examines the influence of context versus memorized knowledge in LMs through the lens of the shift caused by contexts at various degrees of informativeness to the models' predictive distribution. Understanding this difference is especially important in the context of knowledge conflicts between memorized and contextual information. + +Authors propose disentangling context influence in terms of ""persuasion"", i.e. how impactful is the inclusion of the context for answers of a given query/entity pair, and ""susceptibility"", i.e. how much answers of a given query/entity pair are likely to be swayed by the presence of context, and operationalize these concepts using information-theoretic measures akin to mutual information. + +The two metrics are validated using a synthetic dataset sourced from a knowledge graph. Analysis shows that:
+- The degree of persuasiveness of relevant contexts increases with the increase of model size (interesting implications here for the jailbreaking of LLMs!) +- Assertive contexts tend to be more persuasive for closed queries (yes/no) and mid-sized models +- Negation affects context persuasiveness +- Familiar entities (explored as real vs. fake, more frequent in training data and more connected in the KG) are less susceptible to context influence + +Finally, authors suggest applications of the persuasion/susceptibility framing for social science analyses and gender bias evaluation. + +💻 Code: https://github.com/kdu4108/measureLM +📄 Paper: https://huggingface.co/papers/2404.04633 + +🔍 All daily picks: https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-ofc-lms-65ae3339949c5675d25de2f9","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/M_e5Xt_OK1gZ0llKCUku7.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/thh6WrBdyO2XFt7U0FPFE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/Rm94A5dE2dFtSLHw6MYUV.png'}]","[{'_id': '656f312240c7cc8aea012103', 'avatarUrl': '/avatars/cc01563723f70516e4aefd3da625bfb7.svg', 'fullname': 'Kevin Du', 'name': 'kdu4108', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}, {'_id': '62a06ed13df0325603b5f038', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a06ed13df0325603b5f038/VYFIR3T6_CBfGfP5T8nIK.jpeg', 'fullname': 'Niklas Stoehr', 'name': 'niklasstoehr', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}, {'_id': '639361c996eabec668bdfcfc', 'avatarUrl': '/avatars/b719127447ed71eb128f418765cb0cce.svg', 'fullname': 'Ryan Cotterell', 'name': 'rcotterell', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}, {'_id': '5f915e09161dc51925934edf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f915e09161dc51925934edf/2PqjbJ7FVG3FOCNmn7ORY.png', 'fullname': 'Vésteinn Snæbjarnarson', 'name': 'vesteinn', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9}]","[{'reaction': '👍', 'users': ['niklasstoehr', 'Lei-bw'], 'count': 2}, {'reaction': '❤️', 'users': ['afrideva'], 'count': 1}]",2024-04-10 14:30:40,2024-04-10 14:30:40.328,[],/posts/gsarti/180992844061312,2233,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/2RK8J_YSNAK2ob8XZH7w2.jpeg,1740.0,Julian Bilcke,jbilcke-hf,876617081871117,"[{'type': 'text', 'value': 'I\'m extending the AI Comic Factory (to follow-up on the ""bring your own model"" philosophy 🤗) to support the broader LLM ecosystem of our other vendor friends!', 'raw': 'I\'m extending the AI Comic Factory (to follow-up on the ""bring your own model"" philosophy 🤗) to support the broader LLM ecosystem of our other vendor friends!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Here is the announcement: ', 'raw': 'Here is the announcement: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'jbilcke-hf/ai-comic-factory', 'discussionNum': 723}, 'url': 
'https://huggingface.co/spaces/jbilcke-hf/ai-comic-factory/discussions/723'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is an experimental feature, some ', 'raw': 'This is an experimental feature, some '}, {'type': 'text', 'raw': 'models/vendors', 'value': 'models/vendors'}, {'type': 'text', 'value': "" might require parameter tuning but I haven't tested all of them yet (only a bit of GPT-4 👀)"", 'raw': "" might require parameter tuning but I haven't tested all of them yet (only a bit of GPT-4 👀)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Let me know if you experience any issues!', 'raw': 'Let me know if you experience any issues!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'jbilcke-hf/ai-comic-factory'}, 'url': 'https://huggingface.co/spaces/jbilcke-hf/ai-comic-factory', 'raw': 'https://huggingface.co/spaces/jbilcke-hf/ai-comic-factory'}]","I'm extending the AI Comic Factory (to follow-up on the ""bring your own model"" philosophy 🤗) to support the broader LLM ecosystem of our other vendor friends! + +Here is the announcement: +https://huggingface.co/spaces/jbilcke-hf/ai-comic-factory/discussions/723 + +This is an experimental feature, some models/vendors might require parameter tuning but I haven't tested all of them yet (only a bit of GPT-4 👀) + +Let me know if you experience any issues! + +https://huggingface.co/spaces/jbilcke-hf/ai-comic-factory","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64904918584563e08e84d39b/lVJupbK5Q_fPj6gOfyxiw.png'}]",[],"[{'reaction': '🔥', 'users': ['m-ric', 'samusenps', 'kramp', 'thomwolf'], 'count': 4}, {'reaction': '❤️', 'users': ['ari1000'], 'count': 1}]",2024-04-10 14:07:39,2024-04-19 22:41:41.835,"[{'_id': '64904918584563e08e84d39b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/2RK8J_YSNAK2ob8XZH7w2.jpeg', 'fullname': 'Julian Bilcke', 'name': 'jbilcke-hf', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1740, 'isFollowing': False}, {'_id': '66215fe43cf00a10342f4d8a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/EWIWVQMz9BApVOa4tSJt9.jpeg', 'fullname': 'Kirito kirigaya', 'name': 'KIRITO4223', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/jbilcke-hf/876617081871117,6510,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/62f93abbc4817cfc0756b6f8/rGYLaq-rmoJJYotkC1VXk.jpeg,74.0,Anton Obukhov,toshas,676060553776686,"[{'type': 'text', 'value': 'Another gem from our lab — DGInStyle! We use Stable Diffusion to generate semantic segmentation data for autonomous driving and train domain-generalizable networks.', 'raw': 'Another gem from our lab — DGInStyle! 
We use Stable Diffusion to generate semantic segmentation data for autonomous driving and train domain-generalizable networks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📟 Website: ', 'raw': '📟 Website: '}, {'type': 'link', 'href': 'https://dginstyle.github.io', 'raw': 'https://dginstyle.github.io'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧾 Paper: ', 'raw': '🧾 Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2312.03048', 'raw': 'https://arxiv.org/abs/2312.03048'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 Hugging Face Paper: ', 'raw': '🤗 Hugging Face Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2312.03048'}, 'url': 'https://huggingface.co/papers/2312.03048', 'raw': 'https://huggingface.co/papers/2312.03048', 'label': 'DGInStyle: Domain-Generalizable Semantic Segmentation with Image\n Diffusion Models and Stylized Semantic Control (2312.03048)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 Hugging Face Model: ', 'raw': '🤗 Hugging Face Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'yurujaja/DGInStyle'}, 'url': 'https://huggingface.co/yurujaja/DGInStyle', 'raw': 'https://huggingface.co/yurujaja/DGInStyle'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐙 Code: ', 'raw': '🐙 Code: '}, {'type': 'link', 'href': 'https://github.com/yurujaja/DGInStyle', 'raw': 'https://github.com/yurujaja/DGInStyle'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In a nutshell, our pipeline overcomes the resolution loss of Stable Diffusion latent space and the style bias of ControlNet, as shown in the attached figures. This allows us to generate sufficiently high-quality pairs of images and semantic masks to train domain-generalizable semantic segmentation networks. ', 'raw': 'In a nutshell, our pipeline overcomes the resolution loss of Stable Diffusion latent space and the style bias of ControlNet, as shown in the attached figures. This allows us to generate sufficiently high-quality pairs of images and semantic masks to train domain-generalizable semantic segmentation networks. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Team: Yuru Jia (', 'raw': 'Team: Yuru Jia ('}, {'type': 'mention', 'user': 'yurujaja', 'raw': '@yurujaja'}, {'type': 'text', 'value': '), Lukas Hoyer, Shengyu Huang, Tianfu Wang (', 'raw': '), Lukas Hoyer, Shengyu Huang, Tianfu Wang ('}, {'type': 'mention', 'user': 'Tianfwang', 'raw': '@Tianfwang'}, {'type': 'text', 'value': '), Luc Van Gool, Konrad Schindler, and Anton Obukhov (', 'raw': '), Luc Van Gool, Konrad Schindler, and Anton Obukhov ('}, {'type': 'mention', 'user': 'toshas', 'raw': '@toshas'}, {'type': 'text', 'value': ').', 'raw': ').'}]","Another gem from our lab — DGInStyle! We use Stable Diffusion to generate semantic segmentation data for autonomous driving and train domain-generalizable networks. 
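
As a rough illustration of the generic building block this post relies on (not the DGInStyle pipeline itself), here is a minimal sketch of conditioning Stable Diffusion on a semantic map with ControlNet via diffusers; the checkpoint choices and the input file path are assumptions for illustration only.

import torch
from diffusers import ControlNetModel, StableDiffusionControlNetPipeline
from diffusers.utils import load_image

# Generic segmentation-conditioned generation; DGInStyle adds further
# components to fix latent-resolution loss and ControlNet style bias.
controlnet = ControlNetModel.from_pretrained(
    "lllyasviel/sd-controlnet-seg", torch_dtype=torch.float16
)
pipe = StableDiffusionControlNetPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
).to("cuda")

seg_map = load_image("semantic_map.png")  # hypothetical color-coded semantic mask
image = pipe(
    "a photo of a street scene from a driving viewpoint",
    image=seg_map,
    num_inference_steps=30,
).images[0]
image.save("generated_street.png")  # paired with seg_map, this forms a training sample
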
+ +📟 Website: https://dginstyle.github.io +🧾 Paper: https://arxiv.org/abs/2312.03048 +🤗 Hugging Face Paper: https://huggingface.co/papers/2312.03048 +🤗 Hugging Face Model: https://huggingface.co/yurujaja/DGInStyle +🐙 Code: https://github.com/yurujaja/DGInStyle + +In a nutshell, our pipeline overcomes the resolution loss of Stable Diffusion latent space and the style bias of ControlNet, as shown in the attached figures. This allows us to generate sufficiently high-quality pairs of images and semantic masks to train domain-generalizable semantic segmentation networks. + +Team: Yuru Jia (@yurujaja), Lukas Hoyer, Shengyu Huang, Tianfu Wang (@Tianfwang), Luc Van Gool, Konrad Schindler, and Anton Obukhov (@toshas).","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f93abbc4817cfc0756b6f8/p6nz7SW15cMh8h2lHvcsV.qt'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f93abbc4817cfc0756b6f8/l0AckKGOOqzA8LDC5avij.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f93abbc4817cfc0756b6f8/GqvWNIqe09Cu_Zwjj1Yyp.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f93abbc4817cfc0756b6f8/IHEMpONnUZjPwpVR-asts.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/62f93abbc4817cfc0756b6f8/hTVqgpYHiXTP2sqOM3TKd.jpeg'}]","[{'_id': '633c25e000732349209d4e79', 'avatarUrl': '/avatars/0fd853f6cbfcacc98035364585d58f2c.svg', 'fullname': 'Tianfu Wang', 'name': 'Tianfwang', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}, {'_id': '62f93abbc4817cfc0756b6f8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62f93abbc4817cfc0756b6f8/rGYLaq-rmoJJYotkC1VXk.jpeg', 'fullname': 'Anton Obukhov', 'name': 'toshas', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 74}, {'_id': '63f629844b831cc179bcdedd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63f629844b831cc179bcdedd/RemYmSShhthKWtLQroyvY.jpeg', 'fullname': 'Yuru Jia', 'name': 'yurujaja', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}]","[{'reaction': '❤️', 'users': ['samusenps', 'clem', 'Joseph717171', 'oceansweep', 'Timilla'], 'count': 5}, {'reaction': '🚀', 'users': ['osanseviero', 'clem', 'Joseph717171', 'oceansweep'], 'count': 4}, {'reaction': '👍', 'users': ['Timilla', 'Joseph717171', 'oceansweep'], 'count': 3}]",2024-04-10 13:36:20,2024-04-10 13:36:20.987,[],/posts/toshas/676060553776686,1988,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,337623249328427,"[{'type': 'text', 'value': 'LLM2Vec', 'raw': 'LLM2Vec'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Large Language Models Are Secretly Powerful Text Encoders', 'raw': 'Large Language Models Are Secretly Powerful Text Encoders'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.05961'}, 'url': 'https://huggingface.co/papers/2404.05961', 'raw': 'https://huggingface.co/papers/2404.05961', 'label': 'LLM2Vec: Large Language Models Are Secretly Powerful Text Encoders (2404.05961)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 
'value': ""Large decoder-only language models (LLMs) are the state-of-the-art models on most of today's NLP tasks and benchmarks. Yet, the community is only slowly adopting these models for text embedding tasks, which require rich contextualized representations. In this work, we introduce LLM2Vec, a simple unsupervised approach that can transform any decoder-only LLM into a strong text encoder. LLM2Vec consists of three simple steps: 1) enabling bidirectional attention, 2) masked next token prediction, and 3) unsupervised contrastive learning. We demonstrate the effectiveness of LLM2Vec by applying it to 3 popular LLMs ranging from 1.3B to 7B parameters and evaluate the transformed models on English word- and sequence-level tasks. We outperform encoder-only models by a large margin on word-level tasks and reach a new unsupervised state-of-the-art performance on the Massive Text Embeddings Benchmark (MTEB). Moreover, when combining LLM2Vec with supervised contrastive learning, we achieve state-of-the-art performance on MTEB among models that train only on publicly available data. Our strong empirical results and extensive analysis demonstrate that LLMs can be effectively transformed into universal text encoders in a parameter-efficient manner without the need for expensive adaptation or synthetic GPT-4 generated data."", 'raw': ""Large decoder-only language models (LLMs) are the state-of-the-art models on most of today's NLP tasks and benchmarks. Yet, the community is only slowly adopting these models for text embedding tasks, which require rich contextualized representations. In this work, we introduce LLM2Vec, a simple unsupervised approach that can transform any decoder-only LLM into a strong text encoder. LLM2Vec consists of three simple steps: 1) enabling bidirectional attention, 2) masked next token prediction, and 3) unsupervised contrastive learning. We demonstrate the effectiveness of LLM2Vec by applying it to 3 popular LLMs ranging from 1.3B to 7B parameters and evaluate the transformed models on English word- and sequence-level tasks. We outperform encoder-only models by a large margin on word-level tasks and reach a new unsupervised state-of-the-art performance on the Massive Text Embeddings Benchmark (MTEB). Moreover, when combining LLM2Vec with supervised contrastive learning, we achieve state-of-the-art performance on MTEB among models that train only on publicly available data. Our strong empirical results and extensive analysis demonstrate that LLMs can be effectively transformed into universal text encoders in a parameter-efficient manner without the need for expensive adaptation or synthetic GPT-4 generated data.""}, {'type': 'new_line', 'raw': '\n'}]","LLM2Vec + +Large Language Models Are Secretly Powerful Text Encoders + +https://huggingface.co/papers/2404.05961 + +Large decoder-only language models (LLMs) are the state-of-the-art models on most of today's NLP tasks and benchmarks. Yet, the community is only slowly adopting these models for text embedding tasks, which require rich contextualized representations. In this work, we introduce LLM2Vec, a simple unsupervised approach that can transform any decoder-only LLM into a strong text encoder. LLM2Vec consists of three simple steps: 1) enabling bidirectional attention, 2) masked next token prediction, and 3) unsupervised contrastive learning. 
We demonstrate the effectiveness of LLM2Vec by applying it to 3 popular LLMs ranging from 1.3B to 7B parameters and evaluate the transformed models on English word- and sequence-level tasks. We outperform encoder-only models by a large margin on word-level tasks and reach a new unsupervised state-of-the-art performance on the Massive Text Embeddings Benchmark (MTEB). Moreover, when combining LLM2Vec with supervised contrastive learning, we achieve state-of-the-art performance on MTEB among models that train only on publicly available data. Our strong empirical results and extensive analysis demonstrate that LLMs can be effectively transformed into universal text encoders in a parameter-efficient manner without the need for expensive adaptation or synthetic GPT-4 generated data. +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/y8ZJR3T0xtEYg6jKu6XuJ.png'}]",[],"[{'reaction': '🔥', 'users': ['SivilTaram', 'osanseviero', 'CarlLee', 'oliverguhr', 'KvrParaskevi', 'm18coppola', 'celinah'], 'count': 7}, {'reaction': '👍', 'users': ['AshokRaja', 'Sylvestre'], 'count': 2}, {'reaction': '👀', 'users': ['robb-0'], 'count': 1}]",2024-04-10 13:24:16,2024-04-10 13:24:16.519,[],/posts/akhaliq/337623249328427,3283,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,437200140744337,"[{'type': 'text', 'value': '𝗡𝗲𝘄 𝗦𝗽𝗮𝗰𝗲: 𝘼𝙄 𝙏𝙧𝙖𝙫𝙚𝙡 𝙥𝙡𝙖𝙣𝙣𝙚𝙧 🗺️🏕️ Plan your next vacation in a few minutes!', 'raw': '𝗡𝗲𝘄 𝗦𝗽𝗮𝗰𝗲: 𝘼𝙄 𝙏𝙧𝙖𝙫𝙚𝙡 𝙥𝙡𝙖𝙣𝙣𝙚𝙧 🗺️🏕️ Plan your next vacation in a few minutes!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I wanted to try out if a powerful LLM like Mixtral-8x7b had geographical reasoning capabilities.', 'raw': 'I wanted to try out if a powerful LLM like Mixtral-8x7b had geographical reasoning capabilities.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'So I built a small space that prompts the LLM to provide a JSON list of places based on a user input.', 'raw': 'So I built a small space that prompts the LLM to provide a JSON list of places based on a user input.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And the result was impressive! 🤯 ', 'raw': 'And the result was impressive! 🤯 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⇒ 𝗜𝘁 𝘀𝗲𝗲𝗺𝘀 𝗹𝗶𝗸𝗲 𝗠𝗶𝘅𝘁𝗿𝗮𝗹 𝗵𝗮𝘀 𝗮 𝗴𝗿𝗮𝘀𝗽 𝗼𝗳 𝗴𝗲𝗼𝗴𝗿𝗮𝗽𝗵𝗶𝗰𝗮𝗹 𝗰𝗼𝗻𝗰𝗲𝗽𝘁𝘀 𝗹𝗶𝗸𝗲 𝗡𝗼𝗿𝘁𝗵 - 𝗦𝗼𝘂𝘁𝗵, 𝗼𝗿 𝘀𝗽𝗮𝘁𝗶𝗮𝗹 𝗮𝗹𝗶𝗴𝗻𝗺𝗲𝗻𝘁.🧭 Not just describing these concepts, but really applying them in practice, for instance to successfully answer ""give me 4 European cities that are aligned on the map"". This is a 𝗻𝗶𝗰𝗲 𝗲𝘅𝗮𝗺𝗽𝗹𝗲 𝗼𝗳 𝗮𝗻 𝗲𝗺𝗲𝗿𝗴𝗲𝗻𝘁 𝗰𝗮𝗽𝗮𝗯𝗶𝗹𝗶𝘁𝘆, since nothing in the LLM\'s training data should prepare it for this specific task.', 'raw': '⇒ 𝗜𝘁 𝘀𝗲𝗲𝗺𝘀 𝗹𝗶𝗸𝗲 𝗠𝗶𝘅𝘁𝗿𝗮𝗹 𝗵𝗮𝘀 𝗮 𝗴𝗿𝗮𝘀𝗽 𝗼𝗳 𝗴𝗲𝗼𝗴𝗿𝗮𝗽𝗵𝗶𝗰𝗮𝗹 𝗰𝗼𝗻𝗰𝗲𝗽𝘁𝘀 𝗹𝗶𝗸𝗲 𝗡𝗼𝗿𝘁𝗵 - 𝗦𝗼𝘂𝘁𝗵, 𝗼𝗿 𝘀𝗽𝗮𝘁𝗶𝗮𝗹 𝗮𝗹𝗶𝗴𝗻𝗺𝗲𝗻𝘁.🧭 Not just describing these concepts, but really applying them in practice, for instance to successfully answer ""give me 4 European cities that are aligned on the map"". This is a 𝗻𝗶𝗰𝗲 𝗲𝘅𝗮𝗺𝗽𝗹𝗲 𝗼𝗳 𝗮𝗻 𝗲𝗺𝗲𝗿𝗴𝗲𝗻𝘁 𝗰𝗮𝗽𝗮𝗯𝗶𝗹𝗶𝘁𝘆, since nothing in the LLM\'s training data should prepare it for this specific task.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Anyway, I added API calls and a nice visualization on top of the LLM, streaming output, caching for the answers and locations... and ta-da! 
✨ I got the 𝗔𝗜 𝗧𝗿𝗮𝘃𝗲𝗹 𝗣𝗹𝗮𝗻𝗻𝗲𝗿.', 'raw': 'Anyway, I added API calls and a nice visualization on top of the LLM, streaming output, caching for the answers and locations... and ta-da! ✨ I got the 𝗔𝗜 𝗧𝗿𝗮𝘃𝗲𝗹 𝗣𝗹𝗮𝗻𝗻𝗲𝗿.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝙔𝙤𝙪 𝙘𝙖𝙣 𝙙𝙚𝙨𝙘𝙧𝙞𝙗𝙚 𝙞𝙩 𝙮𝙤𝙪𝙧 𝙩𝙧𝙞𝙥, 𝙖𝙣𝙙 𝙞𝙩 𝙬𝙞𝙡𝙡 𝙘𝙤𝙢𝙚 𝙪𝙥 𝙬𝙞𝙩𝙝 𝙣𝙞𝙘𝙚 𝙖𝙣𝙙 𝙘𝙤𝙣𝙫𝙚𝙣𝙞𝙚𝙣𝙩 𝙡𝙤𝙘𝙖𝙩𝙞𝙤𝙣𝙨!', 'raw': '𝙔𝙤𝙪 𝙘𝙖𝙣 𝙙𝙚𝙨𝙘𝙧𝙞𝙗𝙚 𝙞𝙩 𝙮𝙤𝙪𝙧 𝙩𝙧𝙞𝙥, 𝙖𝙣𝙙 𝙞𝙩 𝙬𝙞𝙡𝙡 𝙘𝙤𝙢𝙚 𝙪𝙥 𝙬𝙞𝙩𝙝 𝙣𝙞𝙘𝙚 𝙖𝙣𝙙 𝙘𝙤𝙣𝙫𝙚𝙣𝙞𝙚𝙣𝙩 𝙡𝙤𝙘𝙖𝙩𝙞𝙤𝙣𝙨!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝙏𝙧𝙮 𝙞𝙩 𝙝𝙚𝙧𝙚 👉 ', 'raw': '𝙏𝙧𝙮 𝙞𝙩 𝙝𝙚𝙧𝙚 👉 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'm-ric/ai-travel-planner'}, 'url': 'https://huggingface.co/spaces/m-ric/ai-travel-planner', 'raw': 'https://huggingface.co/spaces/m-ric/ai-travel-planner'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thank you ', 'raw': 'Thank you '}, {'type': 'mention', 'user': 'freddyaboulton', 'raw': '@freddyaboulton'}, {'type': 'text', 'value': ' for the 𝚐𝚛𝚊𝚍𝚒𝚘_𝚏𝚘𝚕𝚒𝚞𝚖 component, and ', 'raw': ' for the 𝚐𝚛𝚊𝚍𝚒𝚘_𝚏𝚘𝚕𝚒𝚞𝚖 component, and '}, {'type': 'mention', 'user': 'clem', 'raw': '@clem'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'mention', 'user': 'pngwn', 'raw': '@pngwn'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'mention', 'user': 'abidlabs', 'raw': '@abidlabs'}, {'type': 'text', 'value': ' for your ideas and support!', 'raw': ' for your ideas and support!'}]","𝗡𝗲𝘄 𝗦𝗽𝗮𝗰𝗲: 𝘼𝙄 𝙏𝙧𝙖𝙫𝙚𝙡 𝙥𝙡𝙖𝙣𝙣𝙚𝙧 🗺️🏕️ Plan your next vacation in a few minutes! + +I wanted to try out if a powerful LLM like Mixtral-8x7b had geographical reasoning capabilities. +So I built a small space that prompts the LLM to provide a JSON list of places based on a user input. + +And the result was impressive! 🤯 + +⇒ 𝗜𝘁 𝘀𝗲𝗲𝗺𝘀 𝗹𝗶𝗸𝗲 𝗠𝗶𝘅𝘁𝗿𝗮𝗹 𝗵𝗮𝘀 𝗮 𝗴𝗿𝗮𝘀𝗽 𝗼𝗳 𝗴𝗲𝗼𝗴𝗿𝗮𝗽𝗵𝗶𝗰𝗮𝗹 𝗰𝗼𝗻𝗰𝗲𝗽𝘁𝘀 𝗹𝗶𝗸𝗲 𝗡𝗼𝗿𝘁𝗵 - 𝗦𝗼𝘂𝘁𝗵, 𝗼𝗿 𝘀𝗽𝗮𝘁𝗶𝗮𝗹 𝗮𝗹𝗶𝗴𝗻𝗺𝗲𝗻𝘁.🧭 Not just describing these concepts, but really applying them in practice, for instance to successfully answer ""give me 4 European cities that are aligned on the map"". This is a 𝗻𝗶𝗰𝗲 𝗲𝘅𝗮𝗺𝗽𝗹𝗲 𝗼𝗳 𝗮𝗻 𝗲𝗺𝗲𝗿𝗴𝗲𝗻𝘁 𝗰𝗮𝗽𝗮𝗯𝗶𝗹𝗶𝘁𝘆, since nothing in the LLM's training data should prepare it for this specific task. + +Anyway, I added API calls and a nice visualization on top of the LLM, streaming output, caching for the answers and locations... and ta-da! ✨ I got the 𝗔𝗜 𝗧𝗿𝗮𝘃𝗲𝗹 𝗣𝗹𝗮𝗻𝗻𝗲𝗿. + +𝙔𝙤𝙪 𝙘𝙖𝙣 𝙙𝙚𝙨𝙘𝙧𝙞𝙗𝙚 𝙞𝙩 𝙮𝙤𝙪𝙧 𝙩𝙧𝙞𝙥, 𝙖𝙣𝙙 𝙞𝙩 𝙬𝙞𝙡𝙡 𝙘𝙤𝙢𝙚 𝙪𝙥 𝙬𝙞𝙩𝙝 𝙣𝙞𝙘𝙚 𝙖𝙣𝙙 𝙘𝙤𝙣𝙫𝙚𝙣𝙞𝙚𝙣𝙩 𝙡𝙤𝙘𝙖𝙩𝙞𝙤𝙣𝙨! 
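
As a hedged sketch of the "JSON list of places" prompting described above (not the Space's actual code), the core call could look roughly like this; the prompt wording, the example trip, and the fallback parsing are all assumptions.

import json
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

# Hypothetical prompt; the Space's real prompt is not shown in the post.
prompt = (
    "[INST] Plan this trip: 'one week of hiking around Annecy'. "
    "Answer ONLY with a JSON list of objects with keys "
    "'name', 'lat', 'lon', 'description'. [/INST]"
)
raw = client.text_generation(prompt, max_new_tokens=512, temperature=0.3)
try:
    places = json.loads(raw)  # list of dicts, ready to pin on a map widget
except json.JSONDecodeError:
    places = []  # a real app would retry or repair malformed output here
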
+ +𝙏𝙧𝙮 𝙞𝙩 𝙝𝙚𝙧𝙚 👉 https://huggingface.co/spaces/m-ric/ai-travel-planner + +Thank you @freddyaboulton for the 𝚐𝚛𝚊𝚍𝚒𝚘_𝚏𝚘𝚕𝚒𝚞𝚖 component, and @clem , @pngwn , @abidlabs for your ideas and support!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63d10d4e8eaa4831005e92b5/EdcSn_rsFZeIcpPWXQnD2.png'}]","[{'_id': '608b8bb39d7c9519b4adae19', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png', 'fullname': 'Abubakar Abid', 'name': 'abidlabs', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 883}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489}, {'_id': '626a9bfa03e2e2796f24ca11', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1654278567459-626a9bfa03e2e2796f24ca11.jpeg', 'fullname': 'Freddy Boulton', 'name': 'freddyaboulton', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 295}, {'_id': '61d5bf2f0435582ab69f8f6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1641398053462-noauth.jpeg', 'fullname': 'Pete', 'name': 'pngwn', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 147}]","[{'reaction': '🚀', 'users': ['pngwn', 'lunarflu', 'osanseviero', 'victor', 'clem', 'monsoon-nlp', 'freddyaboulton'], 'count': 7}, {'reaction': '🔥', 'users': ['pngwn', 'lunarflu', 'clem', 'Locutusque', 'adamelliotfields', 'raincandy-u', 'freddyaboulton'], 'count': 7}, {'reaction': '🤗', 'users': ['lunarflu', 'clem', 'freddyaboulton'], 'count': 3}, {'reaction': '🤯', 'users': ['lunarflu', 'clem', 'freddyaboulton'], 'count': 3}]",2024-04-10 11:37:34,2024-04-10 11:49:07.292,"[{'_id': '61d5bf2f0435582ab69f8f6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1641398053462-noauth.jpeg', 'fullname': 'Pete', 'name': 'pngwn', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 147, 'isFollowing': False}]",/posts/m-ric/437200140744337,2233,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/630430583926de1f7ec62c6b/mVQsL71KrGUs2H5hCTuO7.jpeg,245.0,Quan Nguyen,qnguyen3,200539693018197,"[{'type': 'text', 'value': '🎉 Introducing nanoLLaVA, a powerful multimodal AI model that packs the capabilities of a 1B parameter vision language model into just 5GB of VRAM. 🚀 This makes it an ideal choice for edge devices, bringing cutting-edge visual understanding and generation to your devices like never before. 📱💻', 'raw': '🎉 Introducing nanoLLaVA, a powerful multimodal AI model that packs the capabilities of a 1B parameter vision language model into just 5GB of VRAM. 🚀 This makes it an ideal choice for edge devices, bringing cutting-edge visual understanding and generation to your devices like never before. 
📱💻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'qnguyen3/nanoLLaVA'}, 'url': 'https://huggingface.co/qnguyen3/nanoLLaVA', 'raw': 'https://huggingface.co/qnguyen3/nanoLLaVA'}, {'type': 'text', 'value': ' 🔍', 'raw': ' 🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Spaces: ', 'raw': 'Spaces: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'qnguyen3/nanoLLaVA'}, 'url': 'https://huggingface.co/spaces/qnguyen3/nanoLLaVA', 'raw': 'https://huggingface.co/spaces/qnguyen3/nanoLLaVA'}, {'type': 'text', 'value': ' (thanks to ', 'raw': ' (thanks to '}, {'type': 'mention', 'user': 'merve', 'raw': '@merve'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Under the hood, nanoLLaVA is based on the powerful ', 'raw': 'Under the hood, nanoLLaVA is based on the powerful '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'vilm/Quyen-SE-v0.1'}, 'url': 'https://huggingface.co/vilm/Quyen-SE-v0.1', 'raw': 'https://huggingface.co/vilm/Quyen-SE-v0.1'}, {'type': 'text', 'value': "" (my Qwen1.5-0.5B finetune) and Google's impressive "", 'raw': "" (my Qwen1.5-0.5B finetune) and Google's impressive ""}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'google/siglip-so400m-patch14-384'}, 'url': 'https://huggingface.co/google/siglip-so400m-patch14-384', 'raw': 'https://huggingface.co/google/siglip-so400m-patch14-384'}, {'type': 'text', 'value': '. 🧠 The model is trained using a data-centric approach to ensure optimal performance. 📊', 'raw': '. 🧠 The model is trained using a data-centric approach to ensure optimal performance. 📊'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the spirit of transparency and collaboration, all code and model weights are open-sourced under the Apache 2.0 license. 🤝', 'raw': 'In the spirit of transparency and collaboration, all code and model weights are open-sourced under the Apache 2.0 license. 🤝'}, {'type': 'new_line', 'raw': '\n'}]","🎉 Introducing nanoLLaVA, a powerful multimodal AI model that packs the capabilities of a 1B parameter vision language model into just 5GB of VRAM. 🚀 This makes it an ideal choice for edge devices, bringing cutting-edge visual understanding and generation to your devices like never before. 📱💻 + +Model: https://huggingface.co/qnguyen3/nanoLLaVA 🔍 +Spaces: https://huggingface.co/spaces/qnguyen3/nanoLLaVA (thanks to @merve) + +Under the hood, nanoLLaVA is based on the powerful https://huggingface.co/vilm/Quyen-SE-v0.1 (my Qwen1.5-0.5B finetune) and Google's impressive https://huggingface.co/google/siglip-so400m-patch14-384. 🧠 The model is trained using a data-centric approach to ensure optimal performance. 📊 + +In the spirit of transparency and collaboration, all code and model weights are open-sourced under the Apache 2.0 license. 
🤝 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/630430583926de1f7ec62c6b/f1Q_foPsVfKhyge9JeMSg.png'}]","[{'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786}]","[{'reaction': '🔥', 'users': ['samusenps', 'SivilTaram', 'DmitryRyumin', 'm-ric', 'osanseviero', 'ArthurFischel', 'den0620', '2dts', 'merve', 'qnguyen3', 'ggcristian'], 'count': 11}, {'reaction': '❤️', 'users': ['samusenps', 'SivilTaram', 'ndamulelonemakh', 'osanseviero', 'penma', '2dts', 'cnmoro', 'merve'], 'count': 8}, {'reaction': '🚀', 'users': ['SivilTaram', '2dts'], 'count': 2}, {'reaction': '🤗', 'users': ['ijohn07'], 'count': 1}]",2024-04-10 02:37:07,2024-06-24 12:23:30.358,"[{'_id': '64da082a79c87e13ca232042', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64da082a79c87e13ca232042/DKGEIffK3kLj7SQ20yYiK.png', 'fullname': 'Cristian Gutiérrez', 'name': 'ggcristian', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}]",/posts/qnguyen3/200539693018197,6035,,1 +/avatars/daf716b52a4d91e79f0df1c9cb081613.svg,6.0,Alexander Kunitsyn,sashakunitsyn,553563600596689,"[{'type': 'text', 'value': 'Introducing VLRM: Vision-Language Models act as Reward Models for Image Captioning', 'raw': 'Introducing VLRM: Vision-Language Models act as Reward Models for Image Captioning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper:', 'raw': 'Paper:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://arxiv.org/abs/2404.01911', 'raw': 'https://arxiv.org/abs/2404.01911'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model weights and training logs:', 'raw': 'Model weights and training logs:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'sashakunitsyn/vlrm-blip2-opt-2.7b'}, 'url': 'https://huggingface.co/sashakunitsyn/vlrm-blip2-opt-2.7b', 'raw': 'https://huggingface.co/sashakunitsyn/vlrm-blip2-opt-2.7b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We propose VLRM, a method for fine-tuning an existing image captioning model using reinforcement learning and vision-language models as reward models.', 'raw': 'We propose VLRM, a method for fine-tuning an existing image captioning model using reinforcement learning and vision-language models as reward models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The method manages to significantly improve the generation quality without human-labeled data and is applicable to any image captioning model.', 'raw': 'The method manages to significantly improve the generation quality without human-labeled data and is applicable to any image captioning model.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Our model reaches impressive 0.90 R@1 CLIP Recall score on MS-COCO Carpathy Test Split.', 'raw': 'Our model reaches impressive 0.90 R@1 CLIP Recall score on MS-COCO Carpathy Test Split.'}]","Introducing VLRM: Vision-Language Models act as Reward Models for Image Captioning +Paper: +https://arxiv.org/abs/2404.01911 +Model weights and training logs: 
+https://huggingface.co/sashakunitsyn/vlrm-blip2-opt-2.7b + +We propose VLRM, a method for fine-tuning an existing image captioning model using reinforcement learning and vision-language models as reward models. +The method manages to significantly improve the generation quality without human-labeled data and is applicable to any image captioning model. +Our model reaches impressive 0.90 R@1 CLIP Recall score on MS-COCO Karpathy Test Split.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/635799e5265b76ad9edef8d9/6YKgoME5ZiRUmc_dyGlcB.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/635799e5265b76ad9edef8d9/7h45RN8KQp6nCw0dmeZCH.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/635799e5265b76ad9edef8d9/jYnzcNiAkq32avwd4DA_z.jpeg'}]",[],"[{'reaction': '❤️', 'users': ['samusenps', 'nikgr', 'victor', 'clem', 'osanseviero', 'merve', 'Kukedlc', 'AlkaouDembele', 'xziayro'], 'count': 9}, {'reaction': '➕', 'users': ['samusenps', 'merve'], 'count': 2}]",2024-04-03 13:16:01,2024-04-03 13:16:01.622,[],/posts/sashakunitsyn/553563600596689,1810,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,763522030257659,"[{'type': 'text', 'value': '🚀🎭🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟 🎭🚀', 'raw': '🚀🎭🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟 🎭🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: MonoAvatar++: Efficient 3D Implicit Head Avatar with Mesh-anchored Hash Table Blendshapes 🔝', 'raw': '📄 Title: MonoAvatar++: Efficient 3D Implicit Head Avatar with Mesh-anchored Hash Table Blendshapes 🔝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: MonoAvatar++ is a real-time neural implicit 3D head avatar model with high quality and fine-grained control over facial expressions. 
It uses local hash table blendshapes attached to a parametric facial model for efficient rendering, achieving SOTA results even for challenging expressions.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Ziqian Bai, Feitong Tan, Sean Fanello, Rohit Pandey, Mingsong Dou, Shichen Liu, Ping Tan, Yinda Zhang', 'raw': '👥 Authors: Ziqian Bai, Feitong Tan, Sean Fanello, Rohit Pandey, Mingsong Dou, Shichen Liu, Ping Tan, Yinda Zhang'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸', 'raw': '📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.01543'}, 'url': 'https://huggingface.co/papers/2404.01543', 'raw': 'https://huggingface.co/papers/2404.01543', 'label': 'Efficient 3D Implicit Head Avatar with Mesh-anchored Hash Table\n Blendshapes (2404.01543)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Page: ', 'raw': '🌐 Github Page: '}, {'type': 'link', 'href': 'https://augmentedperception.github.io/monoavatar-plus', 'raw': 'https://augmentedperception.github.io/monoavatar-plus'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #MonoAvatar++ #HeadAvatar #3DModeling #AvatarGeneration #NeuralImplicitAvatar #EfficientRendering #CVPR2024 #DeepLearning #Innovation', 'raw': '🔍 Keywords: #MonoAvatar++ #HeadAvatar #3DModeling #AvatarGeneration #NeuralImplicitAvatar #EfficientRendering #CVPR2024 #DeepLearning #Innovation'}]","🚀🎭🌟 New Research Alert - CVPR 2024 (Avatars Collection)! 🌟 🎭🚀 +📄 Title: MonoAvatar++: Efficient 3D Implicit Head Avatar with Mesh-anchored Hash Table Blendshapes 🔝 + +📝 Description: MonoAvatar++ is a real-time neural implicit 3D head avatar model with high quality and fine-grained control over facial expressions. It uses local hash table blendshapes attached to a parametric facial model for efficient rendering, achieving SOTA results even for challenging expressions. 
+ +👥 Authors: Ziqian Bai, Feitong Tan, Sean Fanello, Rohit Pandey, Mingsong Dou, Shichen Liu, Ping Tan, Yinda Zhang + +📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸 + +🔗 Paper: https://huggingface.co/papers/2404.01543 + +🌐 Github Page: https://augmentedperception.github.io/monoavatar-plus + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +🔍 Keywords: #MonoAvatar++ #HeadAvatar #3DModeling #AvatarGeneration #NeuralImplicitAvatar #EfficientRendering #CVPR2024 #DeepLearning #Innovation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/nNG75t6Rc_Coj7Fs658Sw.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/UtYqifxl_uzO07ARNqyWt.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/A0NILX7xpqRhTwWViF9_j.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/MhRNn1SLzXBJglXyF4FR4.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/JUDuoxySx8QylHpvnRjwD.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/j9vYHcLIBA_GGOinDLwuc.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/uRKakPAUDxuBE8QjwDC3H.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/XWUUsfSTSBnGAcYJTFOwI.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/ahyjDg-c9G19hmM5rLrM-.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/eWBeOl0oPw1G6V-TOxrUv.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/D1fN9eq3dOY8EjIbQQKR1.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/-hvBLUL04WlLSSgBqtIAO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/oc3KUiYGc0xNUj7I8jLRo.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/ROssTa6e0Dx11xgOJWeeI.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}]","[{'reaction': '🔥', 'users': ['DmitryRyumin', 'samusenps', 'nikgr', 'victor', 'clem', 'merve'], 'count': 6}, {'reaction': '🤯', 'users': ['DmitryRyumin', 'clem', 'merve'], 'count': 3}, {'reaction': '🚀', 'users': ['samusenps', 'merve'], 'count': 2}, {'reaction': '❤️', 'users': ['samusenps'], 'count': 1}]",2024-04-03 10:22:09,2024-04-04 12:51:12.763,[],/posts/DmitryRyumin/763522030257659,1809,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png,151.0,Di Zhang,di-zhang-fdu,375595957908279,"[{'type': 'text', 'value': 
'ChemLLM version 1.5 has been open sourced. It has also released a version that has undergone human feedback reinforcement learning. In particular, the front-end and back-end codes have been open sourced simultaneously to facilitate deployment. Everyone is welcome to pay attention.', 'raw': 'ChemLLM version 1.5 has been open sourced. It has also released a version that has undergone human feedback reinforcement learning. In particular, the front-end and back-end codes have been open sourced simultaneously to facilitate deployment. Everyone is welcome to pay attention.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The evaluation set and part of the training set will also be contributed to the community in days, so stay tuned ✧٩(ˊωˋ*)و✧', 'raw': 'The evaluation set and part of the training set will also be contributed to the community in days, so stay tuned ✧٩(ˊωˋ*)و✧'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ChemLLM-1.5', 'raw': 'ChemLLM-1.5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/AI4Chem/ChemLLM-7B-Chat-1.5-DPO', 'raw': 'https://huggingface.co/AI4Chem/ChemLLM-7B-Chat-1.5-DPO'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ChemLLM-1.5-dpo ', 'raw': 'ChemLLM-1.5-dpo '}, {'type': 'link', 'href': 'https://huggingface.co/AI4Chem/ChemLLM-7B-Chat-1.5-SFT', 'raw': 'https://huggingface.co/AI4Chem/ChemLLM-7B-Chat-1.5-SFT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Front-end ', 'raw': 'Front-end '}, {'type': 'link', 'href': 'https://github.com/AI4Chem/NextChat', 'raw': 'https://github.com/AI4Chem/NextChat'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'RAG application example ', 'raw': 'RAG application example '}, {'type': 'link', 'href': 'https://github.com/AI4Chem/NextChat/blob/main/chatpdf.py', 'raw': 'https://github.com/AI4Chem/NextChat/blob/main/chatpdf.py'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Backend ', 'raw': 'Backend '}, {'type': 'link', 'href': 'https://github.com/AI4Chem/api_backend', 'raw': 'https://github.com/AI4Chem/api_backend'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Front-end Demo ', 'raw': 'Front-end Demo '}, {'type': 'link', 'href': 'https://chemllm.org/', 'raw': 'https://chemllm.org/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'RAG application example Demo ', 'raw': 'RAG application example Demo '}, {'type': 'link', 'href': 'https://chatpdf.chemllm.org/', 'raw': 'https://chatpdf.chemllm.org/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Backend Demo ', 'raw': 'Backend Demo '}, {'type': 'link', 'href': 'https://api.chemllm.org/', 'raw': 'https://api.chemllm.org/'}]","ChemLLM version 1.5 has been open sourced. It has also released a version that has undergone human feedback reinforcement learning. In particular, the front-end and back-end codes have been open sourced simultaneously to facilitate deployment. Everyone is welcome to pay attention. 
+The evaluation set and part of the training set will also be contributed to the community in days, so stay tuned ✧٩(ˊωˋ*)و✧ + +ChemLLM-1.5 +https://huggingface.co/AI4Chem/ChemLLM-7B-Chat-1.5-DPO + +ChemLLM-1.5-dpo https://huggingface.co/AI4Chem/ChemLLM-7B-Chat-1.5-SFT + +Front-end https://github.com/AI4Chem/NextChat +RAG application example https://github.com/AI4Chem/NextChat/blob/main/chatpdf.py +Backend https://github.com/AI4Chem/api_backend + +Front-end Demo https://chemllm.org/ +RAG application example Demo https://chatpdf.chemllm.org/ +Backend Demo https://api.chemllm.org/",[],[],"[{'reaction': '🤗', 'users': ['di-zhang-fdu', 'osanseviero', 'ajibawa-2023', 'samusenps', 'nikgr', 'clem', 'merve', 'mmhamdy', 'dillfrescott'], 'count': 9}, {'reaction': '🚀', 'users': ['samusenps', 'clem', 'merve', 'dillfrescott'], 'count': 4}, {'reaction': '😎', 'users': ['samusenps', 'clem', 'merve', 'dillfrescott'], 'count': 4}]",2024-04-03 07:48:43,2024-04-14 18:50:10.900,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}, {'_id': '64bce15bafd1e46c5504ad38', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64bce15bafd1e46c5504ad38/vkEjiu-mIagKlrXzDH75o.png', 'fullname': 'Di Zhang', 'name': 'di-zhang-fdu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 151, 'isFollowing': False}, {'_id': '625a6e0c535747b1a15be2de', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1652197428055-625a6e0c535747b1a15be2de.jpeg', 'fullname': 'Md. Musfiqur Rahaman', 'name': 'musfiqdehan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}]",/posts/di-zhang-fdu/375595957908279,1913,,15 +https://cdn-avatars.huggingface.co/v1/production/uploads/1648966381588-6064e095abd8d3692e3e2ed6.jpeg,2555.0,Radamés Ajna,radames,844508591713694,"[{'type': 'text', 'value': 'Following up on ', 'raw': 'Following up on '}, {'type': 'mention', 'user': 'vikhyatk', 'raw': '@vikhyatk'}, {'type': 'text', 'value': ""'s Moondream2 update and "", 'raw': ""'s Moondream2 update and ""}, {'type': 'mention', 'user': 'santiagomed', 'raw': '@santiagomed'}, {'type': 'text', 'value': ""'s implementation on Candle, I quickly put together the WASM module so that you could try running the ~1.5GB quantized model in the browser. Perhaps the next step is to rewrite it using "", 'raw': ""'s implementation on Candle, I quickly put together the WASM module so that you could try running the ~1.5GB quantized model in the browser. 
Perhaps the next step is to rewrite it using ""}, {'type': 'link', 'href': 'https://github.com/huggingface/ratchet', 'raw': 'https://github.com/huggingface/ratchet'}, {'type': 'text', 'value': ' and run it even faster with WebGPU, ', 'raw': ' and run it even faster with WebGPU, '}, {'type': 'mention', 'user': 'FL33TW00D-HF', 'raw': '@FL33TW00D-HF'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'radames/Candle-Moondream-2'}, 'url': 'https://huggingface.co/spaces/radames/Candle-Moondream-2', 'raw': 'https://huggingface.co/spaces/radames/Candle-Moondream-2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'ps: I have a collection of all Candle WASM demos here ', 'raw': 'ps: I have a collection of all Candle WASM demos here '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'radames/candle-wasm-examples-650898dee13ff96230ce3e1f'}, 'url': 'https://huggingface.co/collections/radames/candle-wasm-examples-650898dee13ff96230ce3e1f', 'raw': 'https://huggingface.co/collections/radames/candle-wasm-examples-650898dee13ff96230ce3e1f'}]","Following up on @vikhyatk's Moondream2 update and @santiagomed's implementation on Candle, I quickly put together the WASM module so that you could try running the ~1.5GB quantized model in the browser. Perhaps the next step is to rewrite it using https://github.com/huggingface/ratchet and run it even faster with WebGPU, @FL33TW00D-HF. + +https://huggingface.co/spaces/radames/Candle-Moondream-2 + +ps: I have a collection of all Candle WASM demos here https://huggingface.co/collections/radames/candle-wasm-examples-650898dee13ff96230ce3e1f","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6064e095abd8d3692e3e2ed6/_fO2jLfq4l_37kTcxxbMf.jpeg'}]","[{'_id': '6597e9f42235d4056bc6980a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6597e9f42235d4056bc6980a/6N_Eira5Rj5e8ZdgekKPQ.jpeg', 'fullname': 'Christopher Fleetwood', 'name': 'FL33TW00D-HF', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 121}, {'_id': '64f2c90c1ae35aedb8df2b27', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64f2c90c1ae35aedb8df2b27/ng37Jo7jZJ6Ze9JsY9PoN.jpeg', 'fullname': 'Santiago Medina', 'name': 'santiagomed', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 14}, {'_id': '63117568fa95534e218da163', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63117568fa95534e218da163/8h9zN8aKRxPLBnXW7sqY9.jpeg', 'fullname': 'Vik Korrapati', 'name': 'vikhyatk', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 552}]","[{'reaction': '❤️', 'users': ['vikhyatk', 'FL33TW00D-HF', 'samusenps', 'nikgr', 'clem', 'merve', 'Felladrin', 'osanseviero'], 'count': 8}, {'reaction': '🔥', 'users': ['vikhyatk', 'FL33TW00D-HF', 'clem', 'merve', 'kylen', 'osanseviero'], 'count': 6}]",2024-04-03 05:40:50,2024-04-03 05:41:58.367,[],/posts/radames/844508591713694,2789,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1654278567459-626a9bfa03e2e2796f24ca11.jpeg,295.0,Freddy Boulton,freddyaboulton,882087960208908,"[{'type': 'text', 'value': 'Gradio 4.25.0 is out with some nice improvements and 
bug fixes!', 'raw': 'Gradio 4.25.0 is out with some nice improvements and bug fixes!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧹 Automatic deletion of gr.State variables stored in the server. Never run out of RAM again. Also adds an unload event you can run when a user closes their browser tab.', 'raw': '🧹 Automatic deletion of gr.State variables stored in the server. Never run out of RAM again. Also adds an unload event you can run when a user closes their browser tab.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '😴 Lazy example caching. You can set cache_examples=""lazy"" to cache examples when they\'re first requested as opposed to before the server launches. This can cut down the server\'s start-up time drastically.', 'raw': '😴 Lazy example caching. You can set cache_examples=""lazy"" to cache examples when they\'re first requested as opposed to before the server launches. This can cut down the server\'s start-up time drastically.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔊 Fixes a bug with streaming audio outputs', 'raw': '🔊 Fixes a bug with streaming audio outputs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 Improvements to gr.ChatInterface like pasting images directly from the clipboard.', 'raw': '🤖 Improvements to gr.ChatInterface like pasting images directly from the clipboard.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'See the rest of the changelog here: ', 'raw': 'See the rest of the changelog here: '}, {'type': 'link', 'href': 'https://www.gradio.app/changelog#4-25-0', 'raw': 'https://www.gradio.app/changelog#4-25-0'}]","Gradio 4.25.0 is out with some nice improvements and bug fixes! + +🧹 Automatic deletion of gr.State variables stored in the server. Never run out of RAM again. Also adds an unload event you can run when a user closes their browser tab. + +😴 Lazy example caching. You can set cache_examples=""lazy"" to cache examples when they're first requested as opposed to before the server launches. This can cut down the server's start-up time drastically. + +🔊 Fixes a bug with streaming audio outputs + +🤖 Improvements to gr.ChatInterface like pasting images directly from the clipboard. + +See the rest of the changelog here: https://www.gradio.app/changelog#4-25-0",[],[],"[{'reaction': '❤️', 'users': ['samusenps', 'ysharma', 'not-lain', 'ajibawa-2023', 'radames', 'DmitryRyumin', 'nikgr', 'clem', 'clefourrier', 'akhaliq', 'on1onmangoes'], 'count': 11}, {'reaction': '➕', 'users': ['samusenps', 'ysharma', 'clem', 'abidlabs', 'akhaliq'], 'count': 5}, {'reaction': '🚀', 'users': ['ysharma', 'radames', 'clem', 'abidlabs', 'akhaliq'], 'count': 5}]",2024-04-03 00:25:19,2024-04-03 00:25:19.541,[],/posts/freddyaboulton/882087960208908,2483,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63117568fa95534e218da163/8h9zN8aKRxPLBnXW7sqY9.jpeg,552.0,Vik Korrapati,vikhyatk,579301491905811,"[{'type': 'text', 'value': 'Released a new version of ', 'raw': 'Released a new version of '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'vikhyatk/moondream2'}, 'url': 'https://huggingface.co/vikhyatk/moondream2', 'raw': 'https://huggingface.co/vikhyatk/moondream2'}, {'type': 'text', 'value': ' today! Primarily focused on improving OCR and captioning (e.g. 
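
To make the two headline Gradio 4.25.0 features above concrete, here is a minimal sketch of lazy example caching and the new unload event. It is a toy app written under stated assumptions, not taken from the release notes.

import time
import gradio as gr

def greet(name):
    time.sleep(1)  # stand-in for an expensive call, so lazy caching matters
    return f"Hello, {name}!"

def on_close():
    print("user closed the tab")  # e.g. free per-session resources here

with gr.Blocks() as demo:
    inp = gr.Textbox(label="Name")
    out = gr.Textbox(label="Greeting")
    inp.submit(greet, inp, out)
    # Examples are cached on first request instead of at server startup.
    gr.Examples(examples=[["Ada"], ["Alan"]], inputs=inp, outputs=out,
                fn=greet, cache_examples="lazy")
    # Runs when a user closes their browser tab.
    demo.unload(on_close)

demo.launch()
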
""Describe this image"", ""Describe this image in one sentence""), but also seeing general improvement across all benchmarks.', 'raw': ' today! Primarily focused on improving OCR and captioning (e.g. ""Describe this image"", ""Describe this image in one sentence""), but also seeing general improvement across all benchmarks.'}]","Released a new version of https://huggingface.co/vikhyatk/moondream2 today! Primarily focused on improving OCR and captioning (e.g. ""Describe this image"", ""Describe this image in one sentence""), but also seeing general improvement across all benchmarks.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63117568fa95534e218da163/WvKqH1EoqmKvlGD3XFtlo.png'}]",[],"[{'reaction': '🔥', 'users': ['not-lain', 'Corianas', 'ajibawa-2023', 'radames', 'DmitryRyumin', 'nikgr', 'clem', 'xingren23', 'dillfrescott', 'aaryanverma', 'linz', 'Jorgegarciairazabal', 'Vlansu'], 'count': 13}, {'reaction': '🚀', 'users': ['not-lain', 'clem', 'sooperDime', 'Sylvestre', 'dillfrescott', 'omaryshchenko', 'Jorgegarciairazabal'], 'count': 7}]",2024-04-02 23:34:11,2024-04-03 05:06:16.754,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}]",/posts/vikhyatk/579301491905811,3361,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1605114051380-noauth.jpeg,315.0,Jeff Boudier,jeffboudier,927093211386293,"[{'type': 'text', 'value': 'These 15 open models are available for serverless inference on Cloudflare Workers AI, powered by GPUs distributed in 150 datacenters globally - 👏 ', 'raw': 'These 15 open models are available for serverless inference on Cloudflare Workers AI, powered by GPUs distributed in 150 datacenters globally - 👏 '}, {'type': 'mention', 'user': 'rita3ko', 'raw': '@rita3ko'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'mchenco', 'raw': '@mchenco'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'jtkipp', 'raw': '@jtkipp'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'nkothariCF', 'raw': '@nkothariCF'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'philschmid', 'raw': '@philschmid'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'Cloudflare/hf-curated-models-available-on-workers-ai-66036e7ad5064318b3e45db6'}, 'url': 'https://huggingface.co/collections/Cloudflare/hf-curated-models-available-on-workers-ai-66036e7ad5064318b3e45db6', 'raw': 'https://huggingface.co/collections/Cloudflare/hf-curated-models-available-on-workers-ai-66036e7ad5064318b3e45db6'}]","These 15 open models are available for serverless inference on Cloudflare Workers AI, powered by GPUs distributed in 150 datacenters globally - 👏 @rita3ko @mchenco @jtkipp @nkothariCF @philschmid + +https://huggingface.co/collections/Cloudflare/hf-curated-models-available-on-workers-ai-66036e7ad5064318b3e45db6","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fac18fb5eec0323e9470ba2/Dd5igw7sDsKjo75l4gLDr.mp4'}]","[{'_id': '63fe3f691193ba2c229ac2e7', 'avatarUrl': 
'/avatars/55bc85ffea21b0a46fcb3bce77bb4779.svg', 'fullname': 'Jesse Kipp', 'name': 'jtkipp', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}, {'_id': '650bb539ba3bc336065efadd', 'avatarUrl': '/avatars/0c96b358e25d8834c086e3b9b5d543eb.svg', 'fullname': 'Michelle Chen', 'name': 'mchenco', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6}, {'_id': '65416f1741676ceaa2c14c58', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65416f1741676ceaa2c14c58/PJMka1bOP20Jk3ZnnG7us.jpeg', 'fullname': 'Nikhil Kothari', 'name': 'nkothariCF', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5}, {'_id': '5ff5d596f244529b3ec0fb89', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1624629516652-5ff5d596f244529b3ec0fb89.png', 'fullname': 'Philipp Schmid', 'name': 'philschmid', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 873}, {'_id': '642ad04934138fd029ed49a7', 'avatarUrl': '/avatars/a022460e9db28ddf363e65ce3171453b.svg', 'fullname': 'Rita Kozlov', 'name': 'rita3ko', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5}]","[{'reaction': '🔥', 'users': ['rwightman', 'nkothariCF', 'samusenps', 'ajibawa-2023', 'philschmid', 'radames', 'victor', 'nikgr', 'kramp', 'clem', 'Tonic'], 'count': 11}, {'reaction': '❤️', 'users': ['samusenps', 'philschmid', 'radames', 'victor', 'clem', 'Tonic'], 'count': 6}]",2024-04-02 22:57:15,2024-04-02 22:57:15.689,[],/posts/jeffboudier/927093211386293,1903,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/657217faabb25ed8aedd5e48/UUHAXeGtOnQBXFD3nYtf2.jpeg,117.0,Vlad Bogolin,vladbogo,938097945286378,"[{'type': 'text', 'value': 'Anthropic introduces ""Many-shot Jailbreaking"" (MSJ), a new attack on large language models! MSJ exploits long context windows to override safety constraints.', 'raw': 'Anthropic introduces ""Many-shot Jailbreaking"" (MSJ), a new attack on large language models! 
MSJ exploits long context windows to override safety constraints.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Points:', 'raw': 'Key Points:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Prompts LLMs with hundreds of examples of harmful behavior formatted as a dialogue', 'raw': '* Prompts LLMs with hundreds of examples of harmful behavior formatted as a dialogue'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Generates malicious examples using an uninhibited ""helpful-only"" model', 'raw': '* Generates malicious examples using an uninhibited ""helpful-only"" model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Effective at jailbreaking models like Claude 2.0, GPT-3.5, GPT-4', 'raw': '* Effective at jailbreaking models like Claude 2.0, GPT-3.5, GPT-4'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Standard alignment techniques provide limited protection against long context attacks', 'raw': '* Standard alignment techniques provide limited protection against long context attacks'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'link', 'href': 'https://www.anthropic.com/research/many-shot-jailbreaking', 'raw': 'https://www.anthropic.com/research/many-shot-jailbreaking'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More details in my blog: ', 'raw': 'More details in my blog: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/vladbogo/many-shot-jailbreaking', 'raw': 'https://huggingface.co/blog/vladbogo/many-shot-jailbreaking'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Congrats to the authors for their work!', 'raw': 'Congrats to the authors for their work!'}]","Anthropic introduces ""Many-shot Jailbreaking"" (MSJ), a new attack on large language models! MSJ exploits long context windows to override safety constraints. 
+ +Key Points: +* Prompts LLMs with hundreds of examples of harmful behavior formatted as a dialogue +* Generates malicious examples using an uninhibited ""helpful-only"" model +* Effective at jailbreaking models like Claude 2.0, GPT-3.5, GPT-4 +* Standard alignment techniques provide limited protection against long context attacks + +Paper: https://www.anthropic.com/research/many-shot-jailbreaking +More details in my blog: https://huggingface.co/blog/vladbogo/many-shot-jailbreaking + +Congrats to the authors for their work!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/W94Ey2-cITOHk--mtzt1J.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/od61V0DfaYOJmFS8GUX8T.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/6_80HmNX86HA5ZCY13HHz.png'}]",[],"[{'reaction': '❤️', 'users': ['samusenps', 'nikgr', 'gruber', 'sauravssss'], 'count': 4}, {'reaction': '👀', 'users': ['monsoon-nlp', 'gruber'], 'count': 2}]",2024-04-02 20:40:06,2024-04-06 15:27:45.140,"[{'_id': '634262af8d8089ebaefd410e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/634262af8d8089ebaefd410e/pcnqe74uMV90K3HVuM76F.png', 'fullname': 'Fizz 🏳️\u200d⚧️', 'name': 'Fizzarolli', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 84, 'isFollowing': False}, {'_id': '64fffae82ad36636be7f3c94', 'avatarUrl': '/avatars/eac1b6903a19bf24057e914b03796f46.svg', 'fullname': 'Saurav singh', 'name': 'sauravssss', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/vladbogo/938097945286378,1828,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,986806198797388,"[{'type': 'text', 'value': '𝟐𝟎𝟐𝟒, 𝐭𝐡𝐞 𝐲𝐞𝐚𝐫 𝐨𝐟 𝐚𝐠𝐞𝐧𝐭 𝐰𝐨𝐫𝐤𝐟𝐥𝐨𝐰𝐬 🔧🦾🤖', 'raw': '𝟐𝟎𝟐𝟒, 𝐭𝐡𝐞 𝐲𝐞𝐚𝐫 𝐨𝐟 𝐚𝐠𝐞𝐧𝐭 𝐰𝐨𝐫𝐤𝐟𝐥𝐨𝐰𝐬 🔧🦾🤖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've just watched Andrew Ng's talk at Sequoia last week."", 'raw': ""I've just watched Andrew Ng's talk at Sequoia last week.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you're interested in Agents, you should really watch it!"", 'raw': ""If you're interested in Agents, you should really watch it!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗪𝗵𝘆 𝘂𝘀𝗲 𝗮𝗴𝗲𝗻𝘁 𝘄𝗼𝗿𝗸𝗳𝗹𝗼𝘄𝘀?', 'raw': '𝗪𝗵𝘆 𝘂𝘀𝗲 𝗮𝗴𝗲𝗻𝘁 𝘄𝗼𝗿𝗸𝗳𝗹𝗼𝘄𝘀?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The current LLM task solving workflow is not very intuitive:', 'raw': 'The current LLM task solving workflow is not very intuitive:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We ask it “write an essay all in one shot, without ever using backspace.”', 'raw': 'We ask it “write an essay all in one shot, without ever using backspace.”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Why not allow the LLM a more similar process to what we would do?', 'raw': 'Why not allow the LLM a more similar process to what we would do?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- “Write an essay outline”', 'raw': '- “Write an essay outline”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- “Do you need web research?”', 'raw': '- “Do 
you need web research?”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- “Write a first draft”', 'raw': '- “Write a first draft”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- “Consider improvements”', 'raw': '- “Consider improvements”'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '…', 'raw': '…'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is called an Agentic workflow. Existing ones bring a huge performance boost. With HumanEval: GPT-4 zero-shot gets 67% score, agentic with either one of tool use or reflection goes over 90%, and the combination of the two scores even higher!', 'raw': 'This is called an Agentic workflow. Existing ones bring a huge performance boost. With HumanEval: GPT-4 zero-shot gets 67% score, agentic with either one of tool use or reflection goes over 90%, and the combination of the two scores even higher!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗔𝗴𝗲𝗻𝘁𝗶𝗰 𝗿𝗲𝗮𝘀𝗼𝗻𝗶𝗻𝗴 𝗱𝗲𝘀𝗶𝗴𝗻 𝗽𝗮𝘁𝘁𝗲𝗿𝗻𝘀', 'raw': '𝗔𝗴𝗲𝗻𝘁𝗶𝗰 𝗿𝗲𝗮𝘀𝗼𝗻𝗶𝗻𝗴 𝗱𝗲𝘀𝗶𝗴𝗻 𝗽𝗮𝘁𝘁𝗲𝗿𝗻𝘀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'On the following two points, the tech is robust:', 'raw': 'On the following two points, the tech is robust:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙️ 𝗥𝗲𝗳𝗹𝗲𝘅𝗶𝗼𝗻: For instance: add a critic step after the writing step', 'raw': '⚙️ 𝗥𝗲𝗳𝗹𝗲𝘅𝗶𝗼𝗻: For instance: add a critic step after the writing step'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🛠️ 𝗧𝗼𝗼𝗹 𝘂𝘀𝗲: extends the capabilities of the LLM by allowing it to call tools, like search or calculator', 'raw': '🛠️ 𝗧𝗼𝗼𝗹 𝘂𝘀𝗲: extends the capabilities of the LLM by allowing it to call tools, like search or calculator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The next two will be needed to go further, but the tech for them is more emerging and not reliable yet:', 'raw': 'The next two will be needed to go further, but the tech for them is more emerging and not reliable yet:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗺️ 𝗣𝗹𝗮𝗻𝗻𝗶𝗻𝗴 forward to decompose task into subtasks. This allows great behaviours like an AI Agent re-routing after a failure', 'raw': '🗺️ 𝗣𝗹𝗮𝗻𝗻𝗶𝗻𝗴 forward to decompose task into subtasks. 
This allows great behaviours like an AI Agent re-routing after a failure'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐝 𝗠𝘂𝗹𝘁𝗶-𝗮𝗴𝗲𝗻𝘁 𝗰𝗼𝗹𝗹𝗮𝗯𝗼𝗿𝗮𝘁𝗶𝗼𝗻: Program a flock of agents with tasks.', 'raw': '🐝 𝗠𝘂𝗹𝘁𝗶-𝗮𝗴𝗲𝗻𝘁 𝗰𝗼𝗹𝗹𝗮𝗯𝗼𝗿𝗮𝘁𝗶𝗼𝗻: Program a flock of agents with tasks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Improving the two above points will unlock huge performance boosts!', 'raw': 'Improving the two above points will unlock huge performance boosts!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Andrew Ng says Research agents are already part of his workflow!', 'raw': 'Andrew Ng says Research agents are already part of his workflow!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝗖𝗹𝗼𝘀𝗶𝗻𝗴 𝘁𝗵𝗼𝘂𝗴𝗵𝘁𝘀', 'raw': '𝗖𝗹𝗼𝘀𝗶𝗻𝗴 𝘁𝗵𝗼𝘂𝗴𝗵𝘁𝘀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Andrew speculates that through agentic workflows, maybe generating many tokens fast from a small LLM will give better results than slower throughput from a powerful LLM like GPT-5.', 'raw': 'Andrew speculates that through agentic workflows, maybe generating many tokens fast from a small LLM will give better results than slower throughput from a powerful LLM like GPT-5.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🎬 Watch the talk here 👉 ', 'raw': '🎬 Watch the talk here 👉 '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=sal78ACtGTc', 'raw': 'https://www.youtube.com/watch?v=sal78ACtGTc'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""📚 I've added his recommended reads to "", 'raw': ""📚 I've added his recommended reads to ""}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'm-ric/agents-65ba776fbd9e29f771c07d4e'}, 'url': 'https://huggingface.co/collections/m-ric/agents-65ba776fbd9e29f771c07d4e', 'raw': 'https://huggingface.co/collections/m-ric/agents-65ba776fbd9e29f771c07d4e'}]","𝟐𝟎𝟐𝟒, 𝐭𝐡𝐞 𝐲𝐞𝐚𝐫 𝐨𝐟 𝐚𝐠𝐞𝐧𝐭 𝐰𝐨𝐫𝐤𝐟𝐥𝐨𝐰𝐬 🔧🦾🤖 + +I've just watched Andrew Ng's talk at Sequoia last week. +If you're interested in Agents, you should really watch it! + +𝗪𝗵𝘆 𝘂𝘀𝗲 𝗮𝗴𝗲𝗻𝘁 𝘄𝗼𝗿𝗸𝗳𝗹𝗼𝘄𝘀? +The current LLM task solving workflow is not very intuitive: +We ask it “write an essay all in one shot, without ever using backspace.” + +Why not allow the LLM a more similar process to what we would do? +- “Write an essay outline” +- “Do you need web research?” +- “Write a first draft” +- “Consider improvements” +… + +This is called an Agentic workflow. Existing ones bring a huge performance boost. With HumanEval: GPT-4 zero-shot gets 67% score, agentic with either one of tool use or reflection goes over 90%, and the combination of the two scores even higher! + +𝗔𝗴𝗲𝗻𝘁𝗶𝗰 𝗿𝗲𝗮𝘀𝗼𝗻𝗶𝗻𝗴 𝗱𝗲𝘀𝗶𝗴𝗻 𝗽𝗮𝘁𝘁𝗲𝗿𝗻𝘀 +On the following two points, the tech is robust: + +⚙️ 𝗥𝗲𝗳𝗹𝗲𝘅𝗶𝗼𝗻: For instance: add a critic step after the writing step +🛠️ 𝗧𝗼𝗼𝗹 𝘂𝘀𝗲: extends the capabilities of the LLM by allowing it to call tools, like search or calculator + +The next two will be needed to go further, but the tech for them is more emerging and not reliable yet: +🗺️ 𝗣𝗹𝗮𝗻𝗻𝗶𝗻𝗴 forward to decompose task into subtasks. This allows great behaviours like an AI Agent re-routing after a failure +🐝 𝗠𝘂𝗹𝘁𝗶-𝗮𝗴𝗲𝗻𝘁 𝗰𝗼𝗹𝗹𝗮𝗯𝗼𝗿𝗮𝘁𝗶𝗼𝗻: Program a flock of agents with tasks. +Improving the two above points will unlock huge performance boosts! + +Andrew Ng says Research agents are already part of his workflow! 
+ +𝗖𝗹𝗼𝘀𝗶𝗻𝗴 𝘁𝗵𝗼𝘂𝗴𝗵𝘁𝘀 +Andrew speculates that through agentic workflows, maybe generating many tokens fast from a small LLM will give better results than slower throughput from a powerful LLM like GPT-5. + +🎬 Watch the talk here 👉 https://www.youtube.com/watch?v=sal78ACtGTc +📚 I've added his recommended reads to https://huggingface.co/collections/m-ric/agents-65ba776fbd9e29f771c07d4e",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'clem', 'neelam91721', 'samusenps', 'nikgr', 'JoPmt', 'Moibe', 'abidlabs', 'poGlingus'], 'count': 9}, {'reaction': '👍', 'users': ['apol', 'abidlabs'], 'count': 2}]",2024-04-02 16:08:03,2024-04-03 08:45:03.935,"[{'_id': '61b9df9b22e5b0fdd501a113', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b9df9b22e5b0fdd501a113/i2yTGbK7pFnw9YLwZ7elp.jpeg', 'fullname': 'Akhil B', 'name': 'hakunamatata1997', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}]",/posts/m-ric/986806198797388,1884,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,704809400668436,"[{'type': 'text', 'value': 'Aurora-M', 'raw': 'Aurora-M'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The First Open Source Multilingual Language Model Red-teamed according to the U.S. Executive Order', 'raw': 'The First Open Source Multilingual Language Model Red-teamed according to the U.S. Executive Order'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2404.00399'}, 'url': 'https://huggingface.co/papers/2404.00399', 'raw': 'https://huggingface.co/papers/2404.00399', 'label': 'Aurora-M: The First Open Source Multilingual Language Model Red-teamed\n according to the U.S. Executive Order (2404.00399)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Pretrained language models underpin several AI applications, but their high computational cost for training limits accessibility. Initiatives such as BLOOM and StarCoder aim to democratize access to pretrained models for collaborative community development. However, such existing models face challenges: limited multilingual capabilities, continual pretraining causing catastrophic forgetting, whereas pretraining from scratch is computationally expensive, and compliance with AI safety and development laws. This paper presents Aurora-M, a 15B parameter multilingual open-source model trained on English, Finnish, Hindi, Japanese, Vietnamese, and code. Continually pretrained from StarCoderPlus on 435 billion additional tokens, Aurora-M surpasses 2 trillion tokens in total training token count. It is the first open-source multilingual model fine-tuned on human-reviewed safety instructions, thus aligning its development not only with conventional red-teaming considerations, but also with the specific concerns articulated in the Biden-Harris Executive Order on the Safe, Secure, and Trustworthy Development and Use of Artificial Intelligence. Aurora-M is rigorously evaluated across various tasks and languages, demonstrating robustness against catastrophic forgetting and outperforming alternatives in multilingual settings, particularly in safety evaluations. 
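The reflection pattern from the agent-workflows post above is easy to prototype. A minimal sketch, assuming a hypothetical `llm(prompt) -> str` wrapper around whatever chat-completion API you use (the function and its prompts are illustrative, not from the talk):

```python
# Sketch of the "Reflexion" design pattern: write, critique, rewrite.
# `llm` is a hypothetical placeholder for any chat-completion call.
def llm(prompt: str) -> str:
    raise NotImplementedError("plug in your preferred chat-completion API")

def write_with_reflection(task: str, rounds: int = 2) -> str:
    draft = llm(f"Write a first draft for this task:\n{task}")
    for _ in range(rounds):
        critique = llm(f"Critique this draft and list concrete improvements:\n{draft}")
        draft = llm(
            f"Task: {task}\nDraft:\n{draft}\nCritique:\n{critique}\n"
            "Rewrite the draft, applying the critique."
        )
    return draft
```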
', 'raw': 'Pretrained language models underpin several AI applications, but their high computational cost for training limits accessibility. Initiatives such as BLOOM and StarCoder aim to democratize access to pretrained models for collaborative community development. However, such existing models face challenges: limited multilingual capabilities, continual pretraining causing catastrophic forgetting, whereas pretraining from scratch is computationally expensive, and compliance with AI safety and development laws. This paper presents Aurora-M, a 15B parameter multilingual open-source model trained on English, Finnish, Hindi, Japanese, Vietnamese, and code. Continually pretrained from StarCoderPlus on 435 billion additional tokens, Aurora-M surpasses 2 trillion tokens in total training token count. It is the first open-source multilingual model fine-tuned on human-reviewed safety instructions, thus aligning its development not only with conventional red-teaming considerations, but also with the specific concerns articulated in the Biden-Harris Executive Order on the Safe, Secure, and Trustworthy Development and Use of Artificial Intelligence. Aurora-M is rigorously evaluated across various tasks and languages, demonstrating robustness against catastrophic forgetting and outperforming alternatives in multilingual settings, particularly in safety evaluations. '}, {'type': 'new_line', 'raw': '\n'}]","Aurora-M + +The First Open Source Multilingual Language Model Red-teamed according to the U.S. Executive Order + +https://huggingface.co/papers/2404.00399 + +Pretrained language models underpin several AI applications, but their high computational cost for training limits accessibility. Initiatives such as BLOOM and StarCoder aim to democratize access to pretrained models for collaborative community development. However, such existing models face challenges: limited multilingual capabilities, continual pretraining causing catastrophic forgetting, whereas pretraining from scratch is computationally expensive, and compliance with AI safety and development laws. This paper presents Aurora-M, a 15B parameter multilingual open-source model trained on English, Finnish, Hindi, Japanese, Vietnamese, and code. Continually pretrained from StarCoderPlus on 435 billion additional tokens, Aurora-M surpasses 2 trillion tokens in total training token count. It is the first open-source multilingual model fine-tuned on human-reviewed safety instructions, thus aligning its development not only with conventional red-teaming considerations, but also with the specific concerns articulated in the Biden-Harris Executive Order on the Safe, Secure, and Trustworthy Development and Use of Artificial Intelligence. Aurora-M is rigorously evaluated across various tasks and languages, demonstrating robustness against catastrophic forgetting and outperforming alternatives in multilingual settings, particularly in safety evaluations. 
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/7C3ctlbiugipQMW9FLjpJ.png'}]",[],"[{'reaction': '👀', 'users': ['osanseviero', 'CulturedMan', 'DmitryRyumin', 'samusenps', 'clem', 'Catliba', 'mayank-mishra', 'sugatoray'], 'count': 8}, {'reaction': '❤️', 'users': ['huu-ontocord', 'Xa9aX', 'mayank-mishra', 'ajibawa-2023', 'alielfilali01', 'jayomb'], 'count': 6}]",2024-04-02 16:03:20,2024-04-02 16:03:33.451,[],/posts/akhaliq/704809400668436,2233,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,292464465490361,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Have Faith in Faithfulness: Going Beyond Circuit Overlap When Finding Model Mechanisms by "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Have Faith in Faithfulness: Going Beyond Circuit Overlap When Finding Model Mechanisms by ""}, {'type': 'mention', 'user': 'mwhanna', 'raw': '@mwhanna'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'sandropezzelle', 'raw': '@sandropezzelle'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'belinkov', 'raw': '@belinkov'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Edge attribution patching (EAP) is a circuit discovery technique using gradients to approximate the effects of causal intervening on each model edge. In the literature, its effectiveness is validated by comparing the overlap of its resulting circuits with those found via causal interventions (much more expensive).', 'raw': 'Edge attribution patching (EAP) is a circuit discovery technique using gradients to approximate the effects of causal intervening on each model edge. In the literature, its effectiveness is validated by comparing the overlap of its resulting circuits with those found via causal interventions (much more expensive).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This work:', 'raw': 'This work:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Proposes a new method for faithful and efficient circuit discovery named edge attribution patching with integrated gradients (EAP-IG)', 'raw': '1. Proposes a new method for faithful and efficient circuit discovery named edge attribution patching with integrated gradients (EAP-IG)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Evaluates the faithfulness EAP, EAP-IG and activation patching, i.e. whether behavior of the model remains consistent after all non-circuit edges are ablated.', 'raw': '2. Evaluates the faithfulness EAP, EAP-IG and activation patching, i.e. whether behavior of the model remains consistent after all non-circuit edges are ablated.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Highlights that, while the no-overlap and full-overlap of EAP-like methods with activation patching results are generally good indicators of unfaithful and faithful (respectively) circuit identification, circuits with moderate overlap cannot generally assumed to be faithful to model behavior.', 'raw': '3. 
Highlights that, while the no-overlap and full-overlap of EAP-like methods with activation patching results are generally good indicators of unfaithful and faithful (respectively) circuit identification, circuits with moderate overlap cannot generally assumed to be faithful to model behavior.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'An advantage of EAP-IG is enabling the usage of KL-Divergence as a target for gradient propagation, which is not possible in the case of raw gradient-based EAP. ', 'raw': 'An advantage of EAP-IG is enabling the usage of KL-Divergence as a target for gradient propagation, which is not possible in the case of raw gradient-based EAP. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'EAP-IG runtime is approximately similar to the one of EAP, with a small number of steps to approximate the gradient integral.', 'raw': 'EAP-IG runtime is approximately similar to the one of EAP, with a small number of steps to approximate the gradient integral.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Importantly, circuit faithfulness does not imply completeness, i.e. whether all components participating towards a specific task were accounted for. This aspect is identified as interesting for future work.', 'raw': 'Importantly, circuit faithfulness does not imply completeness, i.e. whether all components participating towards a specific task were accounted for. This aspect is identified as interesting for future work.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.17806'}, 'url': 'https://huggingface.co/papers/2403.17806', 'raw': 'https://huggingface.co/papers/2403.17806', 'label': 'Have Faith in Faithfulness: Going Beyond Circuit Overlap When Finding\n Model Mechanisms (2403.17806)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 All daily picks: ', 'raw': '🔍 All daily picks: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9', 'raw': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Have Faith in Faithfulness: Going Beyond Circuit Overlap When Finding Model Mechanisms by @mwhanna @sandropezzelle @belinkov + +Edge attribution patching (EAP) is a circuit discovery technique using gradients to approximate the effects of causal intervening on each model edge. In the literature, its effectiveness is validated by comparing the overlap of its resulting circuits with those found via causal interventions (much more expensive). + +This work: + +1. Proposes a new method for faithful and efficient circuit discovery named edge attribution patching with integrated gradients (EAP-IG) +2. Evaluates the faithfulness EAP, EAP-IG and activation patching, i.e. whether behavior of the model remains consistent after all non-circuit edges are ablated. +3. 
Highlights that, while the no-overlap and full-overlap of EAP-like methods with activation patching results are generally good indicators of unfaithful and faithful (respectively) circuit identification, circuits with moderate overlap cannot generally assumed to be faithful to model behavior. + +An advantage of EAP-IG is enabling the usage of KL-Divergence as a target for gradient propagation, which is not possible in the case of raw gradient-based EAP. + +EAP-IG runtime is approximately similar to the one of EAP, with a small number of steps to approximate the gradient integral. + +Importantly, circuit faithfulness does not imply completeness, i.e. whether all components participating towards a specific task were accounted for. This aspect is identified as interesting for future work. + +📄 Paper: https://huggingface.co/papers/2403.17806 + +🔍 All daily picks: https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/4_nMQgwlvRITNWQLO4w86.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/TXwT9VOLdivdURyZj6gXW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/9MTGZKoxEpaQZoGqrMrL-.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/j2eqlu8q1qD3G8MGc3K0I.png'}]","[{'_id': '614c57f1ee44bcfe57b366d6', 'avatarUrl': '/avatars/186a9aed84681246f48ed2a012c50def.svg', 'fullname': 'Yonatan Belinkov', 'name': 'belinkov', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}, {'_id': '6336fa69be97f1977e796c42', 'avatarUrl': '/avatars/cee8dc17ab89b050a862dee09a0a0195.svg', 'fullname': 'Michael Hanna', 'name': 'mwhanna', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}, {'_id': '625523da03005d0910ba6776', 'avatarUrl': '/avatars/cfd43208990ffbcf71a37fd396b7f11c.svg', 'fullname': 'Sandro Pezzelle', 'name': 'sandropezzelle', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}]","[{'reaction': '❤️', 'users': ['javifer', 'osanseviero', 'samusenps', 'clem', 'giux78', 'mmhamdy', 'alemiaschi'], 'count': 7}, {'reaction': '👍', 'users': ['shiv2050'], 'count': 1}]",2024-03-27 09:34:06,2024-03-27 09:34:06.994,[],/posts/gsarti/292464465490361,1273,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5fef4eb7770b06e11c2c6381/1NMdigjCGtn0yvQZSi5NJ.png,64.0,Alessandro Ercolani,giux78,285448939482538,"[{'type': 'text', 'value': 'Based on the work of ', 'raw': 'Based on the work of '}, {'type': 'mention', 'user': 'mrinaldi', 'raw': '@mrinaldi'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'ruggsea', 'raw': '@ruggsea'}, {'type': 'text', 'value': ' we just released the biggest - ready for training - conversational dataset based on Usenet data in the Italian language 🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹. It contains about 9 millions of conversations made by real humans. ', 'raw': ' we just released the biggest - ready for training - conversational dataset based on Usenet data in the Italian language 🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹. It contains about 9 millions of conversations made by real humans. 
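To make the EAP-IG idea above concrete: instead of a single gradient at one input, integrated gradients average gradients along a path between a corrupted and a clean input. A toy sketch of that integral approximation (not the paper's code; `f` stands in for a scalar metric such as the KL divergence of the model's outputs):

```python
# Toy integrated-gradients sketch illustrating the idea behind EAP-IG.
import torch

def f(x):
    # stand-in for a scalar behavioral metric (e.g. KL divergence); toy function here
    return (x ** 2).sum()

def integrated_gradients(clean, corrupted, steps=8):
    accum = torch.zeros_like(clean)
    for k in range(1, steps + 1):
        # interpolate between corrupted and clean inputs, then take a gradient
        point = (corrupted + (k / steps) * (clean - corrupted)).requires_grad_(True)
        f(point).backward()
        accum += point.grad
    # attribution per dimension; its sum approaches f(clean) - f(corrupted) as steps grow
    return (clean - corrupted) * accum / steps

print(integrated_gradients(torch.ones(4), torch.zeros(4)))
```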
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'mii-community/UsenetArchiveIT-conversations'}, 'url': 'https://huggingface.co/datasets/mii-community/UsenetArchiveIT-conversations', 'raw': 'https://huggingface.co/datasets/mii-community/UsenetArchiveIT-conversations'}, {'type': 'new_line', 'raw': '\n'}]","Based on the work of @mrinaldi and @ruggsea we just released the biggest - ready for training - conversational dataset based on Usenet data in the Italian language 🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹🇮🇹. It contains about 9 million conversations made by real humans. + +https://huggingface.co/datasets/mii-community/UsenetArchiveIT-conversations +",[],"[{'_id': '65c27751d2fbc4e846637421', 'avatarUrl': '/avatars/641876a24d4ee45dab0f9723d7b9e7f1.svg', 'fullname': 'Matteo Rinaldi', 'name': 'mrinaldi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7}, {'_id': '635f0b99cf0289e13c487bee', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/635f0b99cf0289e13c487bee/XieL1fCJkYToy5OGPPRy_.jpeg', 'fullname': 'Ruggero Marino Lazzaroni', 'name': 'ruggsea', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 17}]","[{'reaction': '👍', 'users': ['mrinaldi', 'gsarti', 'fgamezf', 'osanseviero', 'giux78', 'merve', 'clem'], 'count': 7}, {'reaction': '❤️', 'users': ['gsarti', 'osanseviero', 'merve', 'samusenps', 'clem', 'giux78'], 'count': 6}]",2024-03-27 07:54:41,2024-03-27 08:15:52.907,[],/posts/giux78/285448939482538,1291,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg,210.0,Sebastian Gabarain,Locutusque,826730657864057,"[{'type': 'text', 'value': ""Exciting news! 🎉 I've created the OpenCerebrum datasets, open-source alternatives to Aether Research's proprietary Cerebrum dataset."", 'raw': ""Exciting news! 🎉 I've created the OpenCerebrum datasets, open-source alternatives to Aether Research's proprietary Cerebrum dataset.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The first, OpenCerebrum SFT, is a text-generation and question-answering dataset with ~1.2M examples, curated from sources like Open-Orca, glaiveai, camel-ai, and more! 📚', 'raw': 'The first, OpenCerebrum SFT, is a text-generation and question-answering dataset with ~1.2M examples, curated from sources like Open-Orca, glaiveai, camel-ai, and more! 📚'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The second, OpenCerebrum DPO, is a smaller dataset with ~21k examples, focusing on direct preference optimization. It's curated from sources like jondurbin, argilla, grimulkan, and others. 📊"", 'raw': ""The second, OpenCerebrum DPO, is a smaller dataset with ~21k examples, focusing on direct preference optimization. It's curated from sources like jondurbin, argilla, grimulkan, and others. 📊""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Both datasets are licensed under Apache-2.0 and are available in English. They're ready for use in your projects, and I welcome any feedback for future improvements! 
🚀""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Locutusque/OpenCerebrum-dpo'}, 'url': 'https://huggingface.co/datasets/Locutusque/OpenCerebrum-dpo', 'raw': 'https://huggingface.co/datasets/Locutusque/OpenCerebrum-dpo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Locutusque/OpenCerebrum-SFT'}, 'url': 'https://huggingface.co/datasets/Locutusque/OpenCerebrum-SFT', 'raw': 'https://huggingface.co/datasets/Locutusque/OpenCerebrum-SFT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Locutusque/OpenCerebrum-1.0-7b-SFT'}, 'url': 'https://huggingface.co/Locutusque/OpenCerebrum-1.0-7b-SFT', 'raw': 'https://huggingface.co/Locutusque/OpenCerebrum-1.0-7b-SFT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Locutusque/OpenCerebrum-1.0-7b-DPO'}, 'url': 'https://huggingface.co/Locutusque/OpenCerebrum-1.0-7b-DPO', 'raw': 'https://huggingface.co/Locutusque/OpenCerebrum-1.0-7b-DPO'}]","Exciting news! 🎉 I've created the OpenCerebrum datasets, open-source alternatives to Aether Research's proprietary Cerebrum dataset. + +The first, OpenCerebrum SFT, is a text-generation and question-answering dataset with ~1.2M examples, curated from sources like Open-Orca, glaiveai, camel-ai, and more! 📚 + +The second, OpenCerebrum DPO, is a smaller dataset with ~21k examples, focusing on direct preference optimization. It's curated from sources like jondurbin, argilla, grimulkan, and others. 📊 + +Both datasets are licensed under Apache-2.0 and are available in English. They're ready for use in your projects, and I welcome any feedback for future improvements! 
🚀 + +https://huggingface.co/datasets/Locutusque/OpenCerebrum-dpo +https://huggingface.co/datasets/Locutusque/OpenCerebrum-SFT +https://huggingface.co/Locutusque/OpenCerebrum-1.0-7b-SFT +https://huggingface.co/Locutusque/OpenCerebrum-1.0-7b-DPO",[],[],"[{'reaction': '❤️', 'users': ['afrideva', 'samusenps', 'ajibawa-2023', 'giux78', 'osanseviero', 'lewtun', 'dvilasuero', 'lhoestq', 'victor', 'Tom-Neverwinter', 'InferenceIllusionist', 'MexIvanov', 'clefourrier', 'mlabonne'], 'count': 14}, {'reaction': '👍', 'users': ['monsoon-nlp', 'Xaln', 'osanseviero', 'lewtun', 'dvilasuero', 'Tom-Neverwinter', 'shiv2050'], 'count': 7}, {'reaction': '🤝', 'users': ['samusenps', 'osanseviero', 'lewtun', 'dvilasuero', 'Tom-Neverwinter'], 'count': 5}, {'reaction': '🔥', 'users': ['lhoestq', 'ToKrCZ', 'Tom-Neverwinter', 'mlabonne'], 'count': 4}]",2024-03-27 00:07:00,2024-03-27 14:01:49.658,"[{'_id': '634262af8d8089ebaefd410e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/634262af8d8089ebaefd410e/pcnqe74uMV90K3HVuM76F.png', 'fullname': 'Fizz 🏳️\u200d⚧️', 'name': 'Fizzarolli', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 84, 'isFollowing': False}, {'_id': '6437292ecd93f4c9a34b0d47', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg', 'fullname': 'Sebastian Gabarain', 'name': 'Locutusque', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 210, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '5f0c746619cb630495b814fd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1594651707950-noauth.jpeg', 'fullname': 'Lewis Tunstall', 'name': 'lewtun', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1028, 'isFollowing': False}]",/posts/Locutusque/826730657864057,2651,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/62cd5057674cdb524450093d/f67rlrdsKPRTLdXCXoa_X.jpeg,64.0,Mayank Mishra,mayank-mishra,736768871803868,"[{'type': 'text', 'value': 'Current LLMs are very susceptible to generating toxic, harmful and even dangerous content. They can also generate outputs with gender or racial biases.', 'raw': 'Current LLMs are very susceptible to generating toxic, harmful and even dangerous content. 
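Since the post says the datasets are ready to use, loading them is the standard one-liner with the `datasets` library (the `train` split name is the usual default and an assumption here):

```python
# Load the OpenCerebrum datasets released above; split name assumed to be "train".
from datasets import load_dataset

sft = load_dataset("Locutusque/OpenCerebrum-SFT", split="train")
dpo = load_dataset("Locutusque/OpenCerebrum-dpo", split="train")
print(sft)  # inspect columns and sizes before wiring into an SFT/DPO trainer
```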
They can also generate outputs with gender or racial biases.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Biden-Harris Executive Order (', 'raw': 'The Biden-Harris Executive Order ('}, {'type': 'link', 'href': 'https://www.federalregister.gov/documents/2023/11/01/2023-24283/safe-secure-and-trustworthy-development-and-use-of-artificial-intelligence', 'raw': 'https://www.federalregister.gov/documents/2023/11/01/2023-24283/safe-secure-and-trustworthy-development-and-use-of-artificial-intelligence'}, {'type': 'text', 'value': ') sets forth guidelines on what is considered a safe AI system.', 'raw': ') sets forth guidelines on what is considered a safe AI system.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Following up on these guidelines, we present the world's first open source Biden-Harris Executive Order Red teamed Multilingual Language Model: Aurora-M."", 'raw': ""Following up on these guidelines, we present the world's first open source Biden-Harris Executive Order Red teamed Multilingual Language Model: Aurora-M.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The model is trained on 5 languages: English, Hindi, Japanese, Vietnamese and Finnish.', 'raw': 'The model is trained on 5 languages: English, Hindi, Japanese, Vietnamese and Finnish.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog: ', 'raw': 'Blog: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/mayank-mishra/aurora', 'raw': 'https://huggingface.co/blog/mayank-mishra/aurora'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper coming out soon.', 'raw': 'Paper coming out soon.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Base model: ', 'raw': 'Base model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'aurora-m/aurora-m-base'}, 'url': 'https://huggingface.co/aurora-m/aurora-m-base', 'raw': 'https://huggingface.co/aurora-m/aurora-m-base'}, {'type': 'text', 'value': ' (not safety tuned)', 'raw': ' (not safety tuned)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Instruct model: ', 'raw': 'Instruct model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'aurora-m/aurora-m-instruct'}, 'url': 'https://huggingface.co/aurora-m/aurora-m-instruct', 'raw': 'https://huggingface.co/aurora-m/aurora-m-instruct'}, {'type': 'text', 'value': ' (not safety tuned)', 'raw': ' (not safety tuned)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Red teamed model: ', 'raw': 'Red teamed model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'aurora-m/aurora-m-biden-harris-redteamed'}, 'url': 'https://huggingface.co/aurora-m/aurora-m-biden-harris-redteamed', 'raw': 'https://huggingface.co/aurora-m/aurora-m-biden-harris-redteamed'}, {'type': 'text', 'value': ' (safety tuned according to the order mentioned above)', 'raw': ' (safety tuned according to the order mentioned above)'}]","Current LLMs are very susceptible to generating toxic, harmful and even dangerous content. They can also generate outputs with gender or racial biases. 
+ +The Biden-Harris Executive Order (https://www.federalregister.gov/documents/2023/11/01/2023-24283/safe-secure-and-trustworthy-development-and-use-of-artificial-intelligence) sets forth guidelines on what is considered a safe AI system. + +Following up on these guidelines, we present the world's first open source Biden-Harris Executive Order Red teamed Multilingual Language Model: Aurora-M. + +The model is trained on 5 languages: English, Hindi, Japanese, Vietnamese and Finnish. + +Blog: https://huggingface.co/blog/mayank-mishra/aurora +Paper coming out soon. + +Base model: https://huggingface.co/aurora-m/aurora-m-base (not safety tuned) +Instruct model: https://huggingface.co/aurora-m/aurora-m-instruct (not safety tuned) +Red teamed model: https://huggingface.co/aurora-m/aurora-m-biden-harris-redteamed (safety tuned according to the order mentioned above)",[],[],"[{'reaction': '😔', 'users': ['samusenps', 'oneiroid', 'clem', 'lunarflu', 'antiven0m', 'adamelliotfields', 'Joseph717171'], 'count': 7}, {'reaction': '🔥', 'users': ['samusenps', 'mayank-mishra', 'clem', 'diwank', 'lunarflu', 'sted97'], 'count': 6}, {'reaction': '👀', 'users': ['samusenps', 'monsoon-nlp', 'lunarflu'], 'count': 3}, {'reaction': '👍', 'users': ['shiv2050'], 'count': 1}]",2024-03-26 23:17:27,2024-03-27 06:32:58.201,[],/posts/mayank-mishra/736768871803868,1905,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/657217faabb25ed8aedd5e48/UUHAXeGtOnQBXFD3nYtf2.jpeg,117.0,Vlad Bogolin,vladbogo,529620339719015,"[{'type': 'text', 'value': 'A new paper introduces Visual CoT, a new approach that enhances multi-modal large language models with visual chain-of-thought reasoning capabilities. This allows language models to dynamically identify and focus on specific regions within images that are most relevant for answering questions, mimicking human-like efficient visual reasoning.', 'raw': 'A new paper introduces Visual CoT, a new approach that enhances multi-modal large language models with visual chain-of-thought reasoning capabilities. 
This allows language models to dynamically identify and focus on specific regions within images that are most relevant for answering questions, mimicking human-like efficient visual reasoning.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Keypoints:', 'raw': 'Keypoints:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Introduces the 373k Visual CoT dataset with bounding box annotations highlighting essential image regions', 'raw': '* Introduces the 373k Visual CoT dataset with bounding box annotations highlighting essential image regions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Proposes a multi-turn pipeline for focusing on relevant visual inputs', 'raw': '* Proposes a multi-turn pipeline for focusing on relevant visual inputs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Achieves strong results on multi-modal benchmarks ', 'raw': '* Achieves strong results on multi-modal benchmarks '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.16999'}, 'url': 'https://huggingface.co/papers/2403.16999', 'raw': 'https://huggingface.co/papers/2403.16999', 'label': 'Visual CoT: Unleashing Chain-of-Thought Reasoning in Multi-Modal\n Language Models (2403.16999)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code, data and other resources: ', 'raw': 'Code, data and other resources: '}, {'type': 'link', 'href': 'https://github.com/deepcs233/Visual-CoT', 'raw': 'https://github.com/deepcs233/Visual-CoT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Congrats to the authors for their work!', 'raw': 'Congrats to the authors for their work!'}]","A new paper introduces Visual CoT, a new approach that enhances multi-modal large language models with visual chain-of-thought reasoning capabilities. This allows language models to dynamically identify and focus on specific regions within images that are most relevant for answering questions, mimicking human-like efficient visual reasoning. + +Keypoints: +* Introduces the 373k Visual CoT dataset with bounding box annotations highlighting essential image regions +* Proposes a multi-turn pipeline for focusing on relevant visual inputs +* Achieves strong results on multi-modal benchmarks + +Paper: https://huggingface.co/papers/2403.16999 +Code, data and other resources: https://github.com/deepcs233/Visual-CoT + +Congrats to the authors for their work!",[],[],"[{'reaction': '❤️', 'users': ['samusenps', 'Csplk', 'osanseviero', 'thesouthfrog', 'Deping', 'sbrandeis'], 'count': 6}, {'reaction': '➕', 'users': ['samusenps', 'osanseviero'], 'count': 2}]",2024-03-26 22:20:59,2024-03-26 22:20:59.712,[],/posts/vladbogo/529620339719015,1394,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,751873931507932,"[{'type': 'text', 'value': '𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗯𝗲𝗮𝗺 𝘀𝗲𝗮𝗿𝗰𝗵 𝗱𝗲𝗰𝗼𝗱𝗶𝗻𝗴 𝘄𝗼𝗿𝗸? ➡️ 𝙉𝙚𝙬 𝙫𝙞𝙨𝙪𝙖𝙡𝙞𝙯𝙖𝙩𝙞𝙤𝙣 𝙩𝙤𝙤𝙡! 👀', 'raw': '𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗯𝗲𝗮𝗺 𝘀𝗲𝗮𝗿𝗰𝗵 𝗱𝗲𝗰𝗼𝗱𝗶𝗻𝗴 𝘄𝗼𝗿𝗸? ➡️ 𝙉𝙚𝙬 𝙫𝙞𝙨𝙪𝙖𝙡𝙞𝙯𝙖𝙩𝙞𝙤𝙣 𝙩𝙤𝙤𝙡! 👀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In Decoder-type LLMs like GPT4 or Mistral-Large, the output is generated one token (=word part) at a time. 
That\'s why they\'re nicknamed ""stochastic parrots"": the ""thinking"" process only happens one step at a time, so it can seem really myopic.', 'raw': 'In Decoder-type LLMs like GPT4 or Mistral-Large, the output is generated one token (=word part) at a time. That\'s why they\'re nicknamed ""stochastic parrots"": the ""thinking"" process only happens one step at a time, so it can seem really myopic.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐒𝐨 𝐡𝐨𝐰 𝐢𝐬 𝐭𝐡𝐞 𝐧𝐞𝐱𝐭 𝐭𝐨𝐤𝐞𝐧 𝐬𝐞𝐥𝐞𝐜𝐭𝐞𝐝?', 'raw': '𝐒𝐨 𝐡𝐨𝐰 𝐢𝐬 𝐭𝐡𝐞 𝐧𝐞𝐱𝐭 𝐭𝐨𝐤𝐞𝐧 𝐬𝐞𝐥𝐞𝐜𝐭𝐞𝐝?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📊 Given its input sentence like ""𝘞𝘩𝘢𝘵 𝘪𝘴 𝘵𝘩𝘦 7𝘵𝘩 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳? 𝘛𝘩𝘦 7𝘵𝘩 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳"", the Decoder LLM generates, for each token in its vocabulary, a score that represents this token\'s probability of coming next.', 'raw': '📊 Given its input sentence like ""𝘞𝘩𝘢𝘵 𝘪𝘴 𝘵𝘩𝘦 7𝘵𝘩 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳? 𝘛𝘩𝘦 7𝘵𝘩 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳"", the Decoder LLM generates, for each token in its vocabulary, a score that represents this token\'s probability of coming next.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For instance: ""𝙞𝙨"" gets score 0.56, and ""𝙘𝙖𝙣"" gets score 0.35.', 'raw': 'For instance: ""𝙞𝙨"" gets score 0.56, and ""𝙘𝙖𝙣"" gets score 0.35.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤑 𝐆𝐫𝐞𝐞𝐝𝐲 𝐝𝐞𝐜𝐨𝐝𝐢𝐧𝐠 is the naive option where you simply take the next most probable token at each step. But this creates paths that maximize very short-term rewards, thus may overlook better paths for the long term (like this time when you played FIFA all evening and arrived unprepared for your school exam on the next day).', 'raw': '🤑 𝐆𝐫𝐞𝐞𝐝𝐲 𝐝𝐞𝐜𝐨𝐝𝐢𝐧𝐠 is the naive option where you simply take the next most probable token at each step. But this creates paths that maximize very short-term rewards, thus may overlook better paths for the long term (like this time when you played FIFA all evening and arrived unprepared for your school exam on the next day).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In our example, the next highest score token might be ""𝙞𝙨"", but this will strongly bias the LLM towards giving a hasty response. On the contrary, starting with ""𝙘𝙖𝙣"" could have been completed with ""𝘣𝘦 𝘰𝘣𝘵𝘢𝘪𝘯𝘦𝘥 𝘧𝘳𝘰𝘮 𝘤𝘰𝘮𝘱𝘶𝘵𝘪𝘯𝘨 𝘱𝘳𝘦𝘷𝘪𝘰𝘶𝘴 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳𝘴 𝘧𝘪𝘳𝘴𝘵"", which steers the LLM towards correct reasoning!', 'raw': 'In our example, the next highest score token might be ""𝙞𝙨"", but this will strongly bias the LLM towards giving a hasty response. On the contrary, starting with ""𝙘𝙖𝙣"" could have been completed with ""𝘣𝘦 𝘰𝘣𝘵𝘢𝘪𝘯𝘦𝘥 𝘧𝘳𝘰𝘮 𝘤𝘰𝘮𝘱𝘶𝘵𝘪𝘯𝘨 𝘱𝘳𝘦𝘷𝘪𝘰𝘶𝘴 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳𝘴 𝘧𝘪𝘳𝘴𝘵"", which steers the LLM towards correct reasoning!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗺️ 𝐁𝐞𝐚𝐦 𝐬𝐞𝐚𝐫𝐜𝐡 improves on greedy decoding by generating at each step several paths - called beams - instead of one. This allows the generation to explore a much larger space, thus finding better completions. In our example, both the ""𝙞𝙨"" and the ""𝙘𝙖𝙣"" completion could be tested. ✅', 'raw': '🗺️ 𝐁𝐞𝐚𝐦 𝐬𝐞𝐚𝐫𝐜𝐡 improves on greedy decoding by generating at each step several paths - called beams - instead of one. This allows the generation to explore a much larger space, thus finding better completions. In our example, both the ""𝙞𝙨"" and the ""𝙘𝙖𝙣"" completion could be tested. 
✅'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""👉 I've created a tool to let you visualize it, thank you "", 'raw': ""👉 I've created a tool to let you visualize it, thank you ""}, {'type': 'mention', 'user': 'joaogante', 'raw': '@joaogante'}, {'type': 'text', 'value': ' for your great help!', 'raw': ' for your great help!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝙏𝙧𝙮 𝙞𝙩 𝙝𝙚𝙧𝙚: ', 'raw': '𝙏𝙧𝙮 𝙞𝙩 𝙝𝙚𝙧𝙚: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'm-ric/beam_search_visualizer'}, 'url': 'https://huggingface.co/spaces/m-ric/beam_search_visualizer', 'raw': 'https://huggingface.co/spaces/m-ric/beam_search_visualizer'}]","𝗛𝗼𝘄 𝗱𝗼𝗲𝘀 𝗯𝗲𝗮𝗺 𝘀𝗲𝗮𝗿𝗰𝗵 𝗱𝗲𝗰𝗼𝗱𝗶𝗻𝗴 𝘄𝗼𝗿𝗸? ➡️ 𝙉𝙚𝙬 𝙫𝙞𝙨𝙪𝙖𝙡𝙞𝙯𝙖𝙩𝙞𝙤𝙣 𝙩𝙤𝙤𝙡! 👀 + +In Decoder-type LLMs like GPT4 or Mistral-Large, the output is generated one token (=word part) at a time. That's why they're nicknamed ""stochastic parrots"": the ""thinking"" process only happens one step at a time, so it can seem really myopic. + +𝐒𝐨 𝐡𝐨𝐰 𝐢𝐬 𝐭𝐡𝐞 𝐧𝐞𝐱𝐭 𝐭𝐨𝐤𝐞𝐧 𝐬𝐞𝐥𝐞𝐜𝐭𝐞𝐝? + +📊 Given its input sentence like ""𝘞𝘩𝘢𝘵 𝘪𝘴 𝘵𝘩𝘦 7𝘵𝘩 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳? 𝘛𝘩𝘦 7𝘵𝘩 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳"", the Decoder LLM generates, for each token in its vocabulary, a score that represents this token's probability of coming next. +For instance: ""𝙞𝙨"" gets score 0.56, and ""𝙘𝙖𝙣"" gets score 0.35. + +🤑 𝐆𝐫𝐞𝐞𝐝𝐲 𝐝𝐞𝐜𝐨𝐝𝐢𝐧𝐠 is the naive option where you simply take the next most probable token at each step. But this creates paths that maximize very short-term rewards, thus may overlook better paths for the long term (like this time when you played FIFA all evening and arrived unprepared for your school exam on the next day). +In our example, the next highest score token might be ""𝙞𝙨"", but this will strongly bias the LLM towards giving a hasty response. On the contrary, starting with ""𝙘𝙖𝙣"" could have been completed with ""𝘣𝘦 𝘰𝘣𝘵𝘢𝘪𝘯𝘦𝘥 𝘧𝘳𝘰𝘮 𝘤𝘰𝘮𝘱𝘶𝘵𝘪𝘯𝘨 𝘱𝘳𝘦𝘷𝘪𝘰𝘶𝘴 𝘍𝘪𝘣𝘰𝘯𝘢𝘤𝘤𝘪 𝘯𝘶𝘮𝘣𝘦𝘳𝘴 𝘧𝘪𝘳𝘴𝘵"", which steers the LLM towards correct reasoning! + +🗺️ 𝐁𝐞𝐚𝐦 𝐬𝐞𝐚𝐫𝐜𝐡 improves on greedy decoding by generating at each step several paths - called beams - instead of one. This allows the generation to explore a much larger space, thus finding better completions. In our example, both the ""𝙞𝙨"" and the ""𝙘𝙖𝙣"" completion could be tested. ✅ + +👉 I've created a tool to let you visualize it, thank you @joaogante for your great help! 
+𝙏𝙧𝙮 𝙞𝙩 𝙝𝙚𝙧𝙚: https://huggingface.co/spaces/m-ric/beam_search_visualizer",[],"[{'_id': '61d2c54d76c37de24cfed058', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1641203017724-noauth.png', 'fullname': 'Joao Gante', 'name': 'joaogante', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 138}]","[{'reaction': '❤️', 'users': ['samusenps', 'matlok', 'joaogante', 'clefourrier', 'fieryTransition', 'Jafta', 'awagner-mainz'], 'count': 7}, {'reaction': '🔥', 'users': ['osanseviero', 'Awal', 'mmhamdy', 'samusenps', 'joaogante', 'clefourrier'], 'count': 6}, {'reaction': '🤯', 'users': ['osanseviero'], 'count': 1}]",2024-03-26 17:23:28,2024-03-26 17:23:28.756,[],/posts/m-ric/751873931507932,1718,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg,2738.0,Julien Chaumond,julien-c,236311186560364,"[{'type': 'text', 'value': 'Very glad to welcome ', 'raw': 'Very glad to welcome '}, {'type': 'mention', 'user': 'josefprusa', 'raw': '@josefprusa'}, {'type': 'text', 'value': ', pioneer of 3D printing and open source hardware, founder of ', 'raw': ', pioneer of 3D printing and open source hardware, founder of '}, {'type': 'link', 'href': 'https://www.prusa3d.com/', 'raw': 'https://www.prusa3d.com/'}, {'type': 'text', 'value': ', to the HF Hub 👋', 'raw': ', to the HF Hub 👋'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AI applied to 3D printing could be big.', 'raw': 'AI applied to 3D printing could be big.'}]","Very glad to welcome @josefprusa, pioneer of 3D printing and open source hardware, founder of https://www.prusa3d.com/, to the HF Hub 👋 + +AI applied to 3D printing could be big.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5dd96eb166059660ed1ee413/Buyqz5oR6wuj5-C9iFUkE.png'}]","[{'_id': '644079ac27dc46cca5902f53', 'avatarUrl': '/avatars/533162c0048b18e983a1b220b82230e1.svg', 'fullname': 'Josef Průša', 'name': 'josefprusa', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2058}]","[{'reaction': '🔥', 'users': ['victor', 'London12345', 'satpalsr', 'diwank', 'ZennyKenny', 'radames', 'adamelliotfields', 'VictorSanh', 'mmhamdy', 'samusenps', 'pcuenq', 'kramp', 'osanseviero', 'stattmone', 'mdouglas', 'clem', 'Skier8402', 'jarvisx17'], 'count': 18}, {'reaction': '👍', 'users': ['Aurelien-Morgan', 'imomayiz', 'mmhamdy', 'samusenps', 'clem'], 'count': 5}, {'reaction': '❤️', 'users': ['samusenps', 'osanseviero', 'clem'], 'count': 3}, {'reaction': '👀', 'users': ['samusenps', 'clem'], 'count': 2}, {'reaction': '➕', 'users': ['samusenps', 'clem'], 'count': 2}]",2024-03-26 16:25:55,2024-03-28 16:48:32.476,"[{'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}]",/posts/julien-c/236311186560364,3209,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,161743256497155,"[{'type': 'text', 'value': 'LLM Agent Operating System', 'raw': 'LLM Agent Operating System'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.16971'}, 'url': 
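The greedy-vs-beam-search contrast described in the post maps directly onto the transformers `generate` API: greedy decoding is `num_beams=1` with sampling off, and beam search just raises `num_beams`. A small example (gpt2 is only a stand-in checkpoint):

```python
# Greedy vs. beam search decoding with transformers' generate().
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")
inputs = tok("What is the 7th Fibonacci number? The 7th Fibonacci number",
             return_tensors="pt")

greedy = model.generate(**inputs, max_new_tokens=30, do_sample=False)  # num_beams=1
beams = model.generate(**inputs, max_new_tokens=30, do_sample=False, num_beams=4)
print(tok.decode(greedy[0]), tok.decode(beams[0]), sep="\n---\n")
```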
'https://huggingface.co/papers/2403.16971', 'raw': 'https://huggingface.co/papers/2403.16971', 'label': 'LLM Agent Operating System (2403.16971)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The integration and deployment of large language model (LLM)-based intelligent agents have been fraught with challenges that compromise their efficiency and efficacy. Among these issues are sub-optimal scheduling and resource allocation of agent requests over the LLM, the difficulties in maintaining context during interactions between agent and LLM, and the complexities inherent in integrating heterogeneous agents with different capabilities and specializations. The rapid increase of agent quantity and complexity further exacerbates these issues, often leading to bottlenecks and sub-optimal utilization of resources. Inspired by these challenges, this paper presents AIOS, an LLM agent operating system, which embeds large language model into operating systems (OS). Specifically, AIOS is designed to optimize resource allocation, facilitate context switch across agents, enable concurrent execution of agents, provide tool service for agents, and maintain access control for agents. We present the architecture of such an operating system, outline the core challenges it aims to resolve, and provide the basic design and implementation of the AIOS. Our experiments on concurrent execution of multiple agents demonstrate the reliability and efficiency of our AIOS modules. Through this, we aim to not only improve the performance and efficiency of LLM agents but also to pioneer for better development and deployment of the AIOS ecosystem in the future.', 'raw': 'The integration and deployment of large language model (LLM)-based intelligent agents have been fraught with challenges that compromise their efficiency and efficacy. Among these issues are sub-optimal scheduling and resource allocation of agent requests over the LLM, the difficulties in maintaining context during interactions between agent and LLM, and the complexities inherent in integrating heterogeneous agents with different capabilities and specializations. The rapid increase of agent quantity and complexity further exacerbates these issues, often leading to bottlenecks and sub-optimal utilization of resources. Inspired by these challenges, this paper presents AIOS, an LLM agent operating system, which embeds large language model into operating systems (OS). Specifically, AIOS is designed to optimize resource allocation, facilitate context switch across agents, enable concurrent execution of agents, provide tool service for agents, and maintain access control for agents. We present the architecture of such an operating system, outline the core challenges it aims to resolve, and provide the basic design and implementation of the AIOS. Our experiments on concurrent execution of multiple agents demonstrate the reliability and efficiency of our AIOS modules. Through this, we aim to not only improve the performance and efficiency of LLM agents but also to pioneer for better development and deployment of the AIOS ecosystem in the future.'}]","LLM Agent Operating System + +https://huggingface.co/papers/2403.16971 + +The integration and deployment of large language model (LLM)-based intelligent agents have been fraught with challenges that compromise their efficiency and efficacy. 
Among these issues are sub-optimal scheduling and resource allocation of agent requests over the LLM, the difficulties in maintaining context during interactions between agent and LLM, and the complexities inherent in integrating heterogeneous agents with different capabilities and specializations. The rapid increase of agent quantity and complexity further exacerbates these issues, often leading to bottlenecks and sub-optimal utilization of resources. Inspired by these challenges, this paper presents AIOS, an LLM agent operating system, which embeds large language model into operating systems (OS). Specifically, AIOS is designed to optimize resource allocation, facilitate context switch across agents, enable concurrent execution of agents, provide tool service for agents, and maintain access control for agents. We present the architecture of such an operating system, outline the core challenges it aims to resolve, and provide the basic design and implementation of the AIOS. Our experiments on concurrent execution of multiple agents demonstrate the reliability and efficiency of our AIOS modules. Through this, we aim to not only improve the performance and efficiency of LLM agents but also to pioneer for better development and deployment of the AIOS ecosystem in the future.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/kQUttU5GaHg-9HDxed9JT.png'}]",[],"[{'reaction': '❤️', 'users': ['samusenps', 'QiushiSun', 'osanseviero', 'alielfilali01', 'AtAndDev', 'Jakaline'], 'count': 6}, {'reaction': '👀', 'users': ['diwank', 'AtAndDev', 'osanseviero'], 'count': 3}, {'reaction': '🚀', 'users': ['samusenps', 'ozayezerceli', 'AtAndDev'], 'count': 3}, {'reaction': '👍', 'users': ['dashfunnydashdash', 'AtAndDev', 'shiv2050'], 'count': 3}, {'reaction': '🤗', 'users': ['ozayezerceli', 'AtAndDev'], 'count': 2}]",2024-03-26 15:32:12,2024-03-27 06:54:25.215,"[{'_id': '62fd781565ba08da9cd68eba', 'avatarUrl': '/avatars/88a391b315665c42bced2df0c300bf8d.svg', 'fullname': 'Vishwas', 'name': 'Vishwas1', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '644c11d4cfebfbf8d8ffbd0a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/n55MWo2SfacYfVI6NerkU.png', 'fullname': 'Alan B Cole', 'name': 'Xaln', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/akhaliq/161743256497155,2214,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6438a9027de34e8ea7e4b257/vib8QSd1AWMr_bR9ig_xJ.jpeg,331.0,Jaward Sesay,Jaward,428244381121381,"[{'type': 'text', 'value': 'MLX RAG with GGUF Models', 'raw': 'MLX RAG with GGUF Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Minimal, clean code implementation of RAG with mlx inferencing for GGUF models.', 'raw': 'Minimal, clean code implementation of RAG with mlx inferencing for GGUF models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'link', 'href': 'https://github.com/Jaykef/mlx-rag-gguf', 'raw': 'https://github.com/Jaykef/mlx-rag-gguf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""The code here builds on vegaluisjose's example, it has been optimized to support RAG-based inferencing for .gguf models. 
I am using BAAI/bge-small-en for the embedding model, tinyllama-1.1b-chat-v1.0.Q4_0.gguf as base model and the custom vector database script for indexing texts in a pdf file. Inference speeds can go up to ~413 tokens/sec for prompts and ~36 tokens/sec for generation on my M2 Air."", 'raw': ""The code here builds on vegaluisjose's example, it has been optimized to support RAG-based inferencing for .gguf models. I am using BAAI/bge-small-en for the embedding model, tinyllama-1.1b-chat-v1.0.Q4_0.gguf as base model and the custom vector database script for indexing texts in a pdf file. Inference speeds can go up to ~413 tokens/sec for prompts and ~36 tokens/sec for generation on my M2 Air.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Queries make use of both .gguf (base model) and .npz (retrieval model) simultaneously resulting in much higher inferencing speeds.', 'raw': 'Queries make use of both .gguf (base model) and .npz (retrieval model) simultaneously resulting in much higher inferencing speeds.'}]","MLX RAG with GGUF Models
+Minimal, clean code implementation of RAG with mlx inferencing for GGUF models.
+
+Code: https://github.com/Jaykef/mlx-rag-gguf
+
+The code here builds on vegaluisjose's example, it has been optimized to support RAG-based inferencing for .gguf models. I am using BAAI/bge-small-en for the embedding model, tinyllama-1.1b-chat-v1.0.Q4_0.gguf as base model and the custom vector database script for indexing texts in a pdf file. Inference speeds can go up to ~413 tokens/sec for prompts and ~36 tokens/sec for generation on my M2 Air.
+
+Queries make use of both .gguf (base model) and .npz (retrieval model) simultaneously resulting in much higher inferencing speeds.","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6438a9027de34e8ea7e4b257/_VswNFRiMFxPPy3todCWu.mp4'}]",[],"[{'reaction': '❤️', 'users': ['samusenps'], 'count': 1}]",2024-03-26 13:38:01,2024-03-26 13:38:01.882,[],/posts/Jaward/428244381121381,1470,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png,3221.0,Omar Sanseviero,osanseviero,481178346523516,"[{'type': 'text', 'value': 'Diaries of Open Source. Part 10 🚀', 'raw': 'Diaries of Open Source. 
Part 10 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌼Marigold-LCM: A super fast SOTA Depth Estimator', 'raw': '🌼Marigold-LCM: A super fast SOTA Depth Estimator'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'prs-eth/marigold-lcm'}, 'url': 'https://hf.co/spaces/prs-eth/marigold-lcm', 'raw': 'https://hf.co/spaces/prs-eth/marigold-lcm'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Original paper: ', 'raw': 'Original paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2312.02145'}, 'url': 'https://hf.co/papers/2312.02145', 'raw': 'https://hf.co/papers/2312.02145', 'label': 'Repurposing Diffusion-Based Image Generators for Monocular Depth\n Estimation (2312.02145)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'link', 'href': 'https://hf.co/prs-eth/marigold-lcm-v1-0', 'raw': 'https://hf.co/prs-eth/marigold-lcm-v1-0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌟Quiet-STaR: A self-teaching technique via internal monologue', 'raw': '🌟Quiet-STaR: A self-teaching technique via internal monologue'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.09629'}, 'url': 'https://hf.co/papers/2403.09629', 'raw': 'https://hf.co/papers/2403.09629', 'label': 'Quiet-STaR: Language Models Can Teach Themselves to Think Before\n Speaking (2403.09629)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub: ', 'raw': 'GitHub: '}, {'type': 'link', 'href': 'https://github.com/ezelikman/quiet-star', 'raw': 'https://github.com/ezelikman/quiet-star'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Tweetutorial: ', 'raw': 'Tweetutorial: '}, {'type': 'link', 'href': 'https://twitter.com/ericzelikman/status/1768663835106513041', 'raw': 'https://twitter.com/ericzelikman/status/1768663835106513041'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖼️ WebSight v0.2: An image-to-code dataset containing tailwind CSS, images in screenshots, and more!', 'raw': '🖼️ WebSight v0.2: An image-to-code dataset containing tailwind CSS, images in screenshots, and more!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HuggingFaceM4/WebSight'}, 'url': 'https://hf.co/datasets/HuggingFaceM4/WebSight', 'raw': 'https://hf.co/datasets/HuggingFaceM4/WebSight'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.09029'}, 'url': 'https://hf.co/papers/2403.09029', 'raw': 'https://hf.co/papers/2403.09029', 'label': 'Unlocking the conversion of Web Screenshots into HTML Code with the\n WebSight Dataset (2403.09029)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog: ', 'raw': 'Blog: '}, {'type': 'link', 'href': 'https://hf.co/blog/websight', 'raw': 'https://hf.co/blog/websight'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🕵️Agent-FLAN - effective agent tuning for LLMs', 'raw': '🕵️Agent-FLAN - effective agent tuning for LLMs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 
'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.12881'}, 'url': 'https://hf.co/papers/2403.12881', 'raw': 'https://hf.co/papers/2403.12881', 'label': 'Agent-FLAN: Designing Data and Methods of Effective Agent Tuning for\n Large Language Models (2403.12881)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'internlm/Agent-FLAN-7b'}, 'url': 'https://hf.co/internlm/Agent-FLAN-7b', 'raw': 'https://hf.co/internlm/Agent-FLAN-7b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'internlm/Agent-FLAN'}, 'url': 'https://hf.co/datasets/internlm/Agent-FLAN', 'raw': 'https://hf.co/datasets/internlm/Agent-FLAN'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Website: ', 'raw': 'Website: '}, {'type': 'link', 'href': 'https://internlm.github.io/Agent-FLAN/', 'raw': 'https://internlm.github.io/Agent-FLAN/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔥HPT, a family of multimodal LLMs from HyperGAI', 'raw': '🔥HPT, a family of multimodal LLMs from HyperGAI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog post: ', 'raw': 'Blog post: '}, {'type': 'link', 'href': 'https://hypergai.com/blog/introducing-hpt-a-family-of-leading-multimodal-llms', 'raw': 'https://hypergai.com/blog/introducing-hpt-a-family-of-leading-multimodal-llms'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'HyperGAI/HPT'}, 'url': 'https://huggingface.co/HyperGAI/HPT', 'raw': 'https://huggingface.co/HyperGAI/HPT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GitHub: ', 'raw': 'GitHub: '}, {'type': 'link', 'href': 'https://github.com/hyperGAI/HPT', 'raw': 'https://github.com/hyperGAI/HPT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌏Models and datasets around the world', 'raw': '🌏Models and datasets around the world'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Tess-70B, a MiQu-70B fine-tune with high-quality data ', 'raw': '- Tess-70B, a MiQu-70B fine-tune with high-quality data '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'migtissera/Tess-70B-v1.6'}, 'url': 'https://hf.co/migtissera/Tess-70B-v1.6', 'raw': 'https://hf.co/migtissera/Tess-70B-v1.6'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- UNI, a model trained on 100 million pathology images from 100k+ slides ', 'raw': '- UNI, a model trained on 100 million pathology images from 100k+ slides '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'MahmoodLab/UNI'}, 'url': 'https://hf.co/MahmoodLab/UNI', 'raw': 'https://hf.co/MahmoodLab/UNI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- CONCH, a VLM trained on 1.17 million pathology image-text pairs ', 'raw': '- CONCH, a VLM trained on 1.17 million pathology image-text pairs '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'MahmoodLab/CONCH'}, 'url': 'https://hf.co/MahmoodLab/CONCH', 'raw': 'https://hf.co/MahmoodLab/CONCH'}]","Diaries of Open Source. 
Part 10 🚀
+
+🌼Marigold-LCM: A super fast SOTA Depth Estimator
+Demo: https://hf.co/spaces/prs-eth/marigold-lcm
+Original paper: https://hf.co/papers/2312.02145
+Model: https://hf.co/prs-eth/marigold-lcm-v1-0
+
+🌟Quiet-STaR: A self-teaching technique via internal monologue
+Paper: https://hf.co/papers/2403.09629
+GitHub: https://github.com/ezelikman/quiet-star
+Tweetutorial: https://twitter.com/ericzelikman/status/1768663835106513041
+
+🖼️ WebSight v0.2: An image-to-code dataset containing tailwind CSS, images in screenshots, and more!
+Dataset: https://hf.co/datasets/HuggingFaceM4/WebSight
+Paper: https://hf.co/papers/2403.09029
+Blog: https://hf.co/blog/websight
+
+🕵️Agent-FLAN - effective agent tuning for LLMs
+Paper: https://hf.co/papers/2403.12881
+Model: https://hf.co/internlm/Agent-FLAN-7b
+Dataset: https://hf.co/datasets/internlm/Agent-FLAN
+Website: https://internlm.github.io/Agent-FLAN/
+
+🔥HPT, a family of multimodal LLMs from HyperGAI
+Blog post: https://hypergai.com/blog/introducing-hpt-a-family-of-leading-multimodal-llms
+Model: https://huggingface.co/HyperGAI/HPT
+GitHub: https://github.com/hyperGAI/HPT
+
+🌏Models and datasets around the world
+- Tess-70B, a MiQu-70B fine-tune with high-quality data https://hf.co/migtissera/Tess-70B-v1.6
+- UNI, a model trained on 100 million pathology images from 100k+ slides https://hf.co/MahmoodLab/UNI
+- CONCH, a VLM trained on 1.17 million pathology image-text pairs https://hf.co/MahmoodLab/CONCH",[],[],"[{'reaction': '🤗', 'users': ['Bingxin', 'toshas', 'DmitryRyumin', 'thomwolf', 'migtissera', 'julien-c', 'London12345', 'mmhamdy', 'samusenps', 'pcuenq', 'victor', 'sbrandeis'], 'count': 12}, {'reaction': '❤️', 'users': ['toshas', 'migtissera', 'julien-c', 'mmhamdy', 'pabloce', 'samusenps', 'pcuenq'], 'count': 7}, {'reaction': '🔥', 'users': ['toshas', 'DmitryRyumin', 'migtissera', 'mmhamdy'], 'count': 4}, {'reaction': '😔', 'users': ['samusenps'], 'count': 1}]",2024-03-26 11:48:14,2024-03-28 05:40:19.342,"[{'_id': '658a2e76367c76b8ee481b5c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/658a2e76367c76b8ee481b5c/SF1IA8CeQBdc7w1cNR6c0.jpeg', 'fullname': 'Bryan Alvarado Villalobos', 'name': 'thebryanalvarado', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '647a6317555b5e199cffd5a2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/647a6317555b5e199cffd5a2/PZJmfzzuM_Vo-8jFGkN_H.jpeg', 'fullname': 'Migel Tissera', 'name': 'migtissera', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 460, 'isFollowing': False}]",/posts/osanseviero/481178346523516,1657,,4
+https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg,77.0,Kenneth Hamilton,ZennyKenny,961953524095401,"[{'type': 'text', 'value': 'Are you interested in contributing to open source multilingual AI with Hugging Face and Argilla? ', 'raw': 'Are you interested in contributing to open source multilingual AI with Hugging Face and Argilla? 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The MPEP initiative (', 'raw': 'The MPEP initiative ('}, {'type': 'link', 'href': 'https://github.com/huggingface/data-is-better-together/tree/main/prompt_translation', 'raw': 'https://github.com/huggingface/data-is-better-together/tree/main/prompt_translation'}, {'type': 'text', 'value': ') of the Data is Better Together project offers the opportunity to do just that by helping to create multilingual model checkpoints. ', 'raw': ') of the Data is Better Together project offers the opportunity to do just that by helping to create multilingual model checkpoints. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you're interested in contributing to the Russian-language dataset, please get in touch as I am the Russian-language lead. If you're interested in contributing to another language, the MPEP link above has all the information you need to do so. 🤗 "", 'raw': ""If you're interested in contributing to the Russian-language dataset, please get in touch as I am the Russian-language lead. If you're interested in contributing to another language, the MPEP link above has all the information you need to do so. 🤗 ""}]","Are you interested in contributing to open source multilingual AI with Hugging Face and Argilla? + +The MPEP initiative (https://github.com/huggingface/data-is-better-together/tree/main/prompt_translation) of the Data is Better Together project offers the opportunity to do just that by helping to create multilingual model checkpoints. + +If you're interested in contributing to the Russian-language dataset, please get in touch as I am the Russian-language lead. If you're interested in contributing to another language, the MPEP link above has all the information you need to do so. 
🤗 ",[],[],"[{'reaction': '🤗', 'users': ['davanstrien', 'samusenps', 'victor', 'taufiqdp', 'osanseviero', 'Priceva', 'sedayilmazer', 'dvilasuero', 'clefourrier', 'kristaller486', 'ZennyKenny', 'alielfilali01'], 'count': 12}, {'reaction': '❤️', 'users': ['davanstrien', 'samusenps', 'osanseviero', 'cstr', 'dvilasuero', 'clefourrier', 'ZennyKenny', 'alielfilali01', 'Tonic'], 'count': 9}]",2024-03-19 21:04:57,2024-03-19 21:12:32.475,"[{'_id': '60107b385ac3e86b3ea4fc34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg', 'fullname': 'Daniel van Strien', 'name': 'davanstrien', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 638, 'isFollowing': False}, {'_id': '656e3808d4de03a07d116850', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/656e3808d4de03a07d116850/62cFw46AmuhdI3gS24F1M.jpeg', 'fullname': 'Kenneth Hamilton', 'name': 'ZennyKenny', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 77, 'isFollowing': False}]",/posts/ZennyKenny/961953524095401,2039,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/63d10d4e8eaa4831005e92b5/7p7-OmWM6PqqCs7ZStPGD.jpeg,1652.0,Aymeric Roucher,m-ric,709432469332844,"[{'type': 'text', 'value': '𝗨𝘀𝗶𝗻𝗴 𝗟𝗟𝗠-𝗮𝘀-𝗮-𝗷𝘂𝗱𝗴𝗲 🧑\u200d⚖️ 𝗳𝗼𝗿 𝗮𝗻 𝗮𝘂𝘁𝗼𝗺𝗮𝘁𝗲𝗱 𝗮𝗻𝗱 𝘃𝗲𝗿𝘀𝗮𝘁𝗶𝗹𝗲 𝗲𝘃𝗮𝗹𝘂𝗮𝘁𝗶𝗼𝗻', 'raw': '𝗨𝘀𝗶𝗻𝗴 𝗟𝗟𝗠-𝗮𝘀-𝗮-𝗷𝘂𝗱𝗴𝗲 🧑\u200d⚖️ 𝗳𝗼𝗿 𝗮𝗻 𝗮𝘂𝘁𝗼𝗺𝗮𝘁𝗲𝗱 𝗮𝗻𝗱 𝘃𝗲𝗿𝘀𝗮𝘁𝗶𝗹𝗲 𝗲𝘃𝗮𝗹𝘂𝗮𝘁𝗶𝗼𝗻'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Evaluating LLM outputs is often hard, since many tasks require open-ended answers for which no deterministic metrics work: for instance, when asking a model to summarize a text, there could be hundreds of correct ways to do it. The most versatile way to grade these outputs is then human evaluation, but it is very time-consuming, thus costly.', 'raw': 'Evaluating LLM outputs is often hard, since many tasks require open-ended answers for which no deterministic metrics work: for instance, when asking a model to summarize a text, there could be hundreds of correct ways to do it. The most versatile way to grade these outputs is then human evaluation, but it is very time-consuming, thus costly.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤔 Then 𝘄𝗵𝘆 𝗻𝗼𝘁 𝗮𝘀𝗸 𝗮𝗻𝗼𝘁𝗵𝗲𝗿 𝗟𝗟𝗠 𝘁𝗼 𝗱𝗼 𝘁𝗵𝗲 𝗲𝘃𝗮𝗹𝘂𝗮𝘁𝗶𝗼𝗻, by providing it relevant rating criteria? 👉 This is the idea behind LLM-as-a-judge.', 'raw': '🤔 Then 𝘄𝗵𝘆 𝗻𝗼𝘁 𝗮𝘀𝗸 𝗮𝗻𝗼𝘁𝗵𝗲𝗿 𝗟𝗟𝗠 𝘁𝗼 𝗱𝗼 𝘁𝗵𝗲 𝗲𝘃𝗮𝗹𝘂𝗮𝘁𝗶𝗼𝗻, by providing it relevant rating criteria? 👉 This is the idea behind LLM-as-a-judge.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙️ To implement a LLM judge correctly, you need a few tricks.', 'raw': '⚙️ To implement a LLM judge correctly, you need a few tricks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""✅ So 𝗜'𝘃𝗲 𝗷𝘂𝘀𝘁 𝗽𝘂𝗯𝗹𝗶𝘀𝗵𝗲𝗱 𝗮 𝗻𝗲𝘄 𝗻𝗼𝘁𝗲𝗯𝗼𝗼𝗸 𝘀𝗵𝗼𝘄𝗶𝗻𝗴 𝗵𝗼𝘄 𝘁𝗼 𝗶𝗺𝗽𝗹𝗲𝗺𝗲𝗻𝘁 𝗶𝘁 𝗽𝗿𝗼𝗽𝗲𝗿𝗹𝘆 𝗶𝗻 𝗼𝘂𝗿 𝗛𝘂𝗴𝗴𝗶𝗻𝗴 𝗙𝗮𝗰𝗲 𝗖𝗼𝗼𝗸𝗯𝗼𝗼𝗸! (you can run it instantly in Google Colab)"", 'raw': ""✅ So 𝗜'𝘃𝗲 𝗷𝘂𝘀𝘁 𝗽𝘂𝗯𝗹𝗶𝘀𝗵𝗲𝗱 𝗮 𝗻𝗲𝘄 𝗻𝗼𝘁𝗲𝗯𝗼𝗼𝗸 𝘀𝗵𝗼𝘄𝗶𝗻𝗴 𝗵𝗼𝘄 𝘁𝗼 𝗶𝗺𝗽𝗹𝗲𝗺𝗲𝗻𝘁 𝗶𝘁 𝗽𝗿𝗼𝗽𝗲𝗿𝗹𝘆 𝗶𝗻 𝗼𝘂𝗿 𝗛𝘂𝗴𝗴𝗶𝗻𝗴 𝗙𝗮𝗰𝗲 𝗖𝗼𝗼𝗸𝗯𝗼𝗼𝗸! 
(you can run it instantly in Google Colab)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ 𝗟𝗟𝗠-𝗮𝘀-𝗮-𝗷𝘂𝗱𝗴𝗲 𝗰𝗼𝗼𝗸𝗯𝗼𝗼𝗸: ', 'raw': '➡️ 𝗟𝗟𝗠-𝗮𝘀-𝗮-𝗷𝘂𝗱𝗴𝗲 𝗰𝗼𝗼𝗸𝗯𝗼𝗼𝗸: '}, {'type': 'link', 'href': 'https://huggingface.co/learn/cookbook/llm_judge', 'raw': 'https://huggingface.co/learn/cookbook/llm_judge'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The Cookbook is a great collection of notebooks demonstrating recipes (thus the ""cookbook"") for common LLM usages. I recommend you to go take a look!', 'raw': 'The Cookbook is a great collection of notebooks demonstrating recipes (thus the ""cookbook"") for common LLM usages. I recommend you to go take a look!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '➡️ 𝗔𝗹𝗹 𝗰𝗼𝗼𝗸𝗯𝗼𝗼𝗸𝘀: ', 'raw': '➡️ 𝗔𝗹𝗹 𝗰𝗼𝗼𝗸𝗯𝗼𝗼𝗸𝘀: '}, {'type': 'link', 'href': 'https://huggingface.co/learn/cookbook/index', 'raw': 'https://huggingface.co/learn/cookbook/index'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thank you ', 'raw': 'Thank you '}, {'type': 'mention', 'user': 'MariaK', 'raw': '@MariaK'}, {'type': 'text', 'value': ' for your support!', 'raw': ' for your support!'}]","𝗨𝘀𝗶𝗻𝗴 𝗟𝗟𝗠-𝗮𝘀-𝗮-𝗷𝘂𝗱𝗴𝗲 🧑‍⚖️ 𝗳𝗼𝗿 𝗮𝗻 𝗮𝘂𝘁𝗼𝗺𝗮𝘁𝗲𝗱 𝗮𝗻𝗱 𝘃𝗲𝗿𝘀𝗮𝘁𝗶𝗹𝗲 𝗲𝘃𝗮𝗹𝘂𝗮𝘁𝗶𝗼𝗻 + +Evaluating LLM outputs is often hard, since many tasks require open-ended answers for which no deterministic metrics work: for instance, when asking a model to summarize a text, there could be hundreds of correct ways to do it. The most versatile way to grade these outputs is then human evaluation, but it is very time-consuming, thus costly. + +🤔 Then 𝘄𝗵𝘆 𝗻𝗼𝘁 𝗮𝘀𝗸 𝗮𝗻𝗼𝘁𝗵𝗲𝗿 𝗟𝗟𝗠 𝘁𝗼 𝗱𝗼 𝘁𝗵𝗲 𝗲𝘃𝗮𝗹𝘂𝗮𝘁𝗶𝗼𝗻, by providing it relevant rating criteria? 👉 This is the idea behind LLM-as-a-judge. + +⚙️ To implement a LLM judge correctly, you need a few tricks. +✅ So 𝗜'𝘃𝗲 𝗷𝘂𝘀𝘁 𝗽𝘂𝗯𝗹𝗶𝘀𝗵𝗲𝗱 𝗮 𝗻𝗲𝘄 𝗻𝗼𝘁𝗲𝗯𝗼𝗼𝗸 𝘀𝗵𝗼𝘄𝗶𝗻𝗴 𝗵𝗼𝘄 𝘁𝗼 𝗶𝗺𝗽𝗹𝗲𝗺𝗲𝗻𝘁 𝗶𝘁 𝗽𝗿𝗼𝗽𝗲𝗿𝗹𝘆 𝗶𝗻 𝗼𝘂𝗿 𝗛𝘂𝗴𝗴𝗶𝗻𝗴 𝗙𝗮𝗰𝗲 𝗖𝗼𝗼𝗸𝗯𝗼𝗼𝗸! (you can run it instantly in Google Colab) +➡️ 𝗟𝗟𝗠-𝗮𝘀-𝗮-𝗷𝘂𝗱𝗴𝗲 𝗰𝗼𝗼𝗸𝗯𝗼𝗼𝗸: https://huggingface.co/learn/cookbook/llm_judge + +The Cookbook is a great collection of notebooks demonstrating recipes (thus the ""cookbook"") for common LLM usages. I recommend you to go take a look! 
+➡️ 𝗔𝗹𝗹 𝗰𝗼𝗼𝗸𝗯𝗼𝗼𝗸𝘀: https://huggingface.co/learn/cookbook/index + +Thank you @MariaK for your support!",[],"[{'_id': '6356a6dc3c32f2c90f4cbe39', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1666623150508-noauth.png', 'fullname': 'Maria Khalusova', 'name': 'MariaK', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 66}]","[{'reaction': '🔥', 'users': ['MariaK', 'lewtun', 'loubnabnl', 'andrewrreed', 'AtAndDev', 'ajibawa-2023', 'andysalerno', 'samusenps', 'Csplk', 'osanseviero', 'Priceva', 'avinash02', 'clefourrier'], 'count': 13}, {'reaction': '❤️', 'users': ['MariaK', 'lewtun', 'loubnabnl', 'andrewrreed', 'AtAndDev', 'samusenps', 'Zyn123', 'osanseviero'], 'count': 8}, {'reaction': '🚀', 'users': ['MariaK', 'lewtun', 'loubnabnl', 'andrewrreed', 'AtAndDev'], 'count': 5}]",2024-03-19 17:14:45,2024-03-26 13:01:46.239,"[{'_id': '630f3e4002ce39336c411048', 'avatarUrl': '/avatars/937a64aea8fde2f41a065f052b39f409.svg', 'fullname': 'alkinun', 'name': 'AtAndDev', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 48, 'isFollowing': False}, {'_id': '6495d5a915d8ef6f01bc75eb', 'avatarUrl': '/avatars/245ec3b183919c079f8c5023b3f7ca9f.svg', 'fullname': 'CultriX', 'name': 'CultriX', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 154, 'isFollowing': False}]",/posts/m-ric/709432469332844,2047,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png,3221.0,Omar Sanseviero,osanseviero,585071145791233,"[{'type': 'text', 'value': 'Diaries of Open Source. Part 6!', 'raw': 'Diaries of Open Source. Part 6!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🏎️xAI releases Grok-1, a 314B MoE', 'raw': '🏎️xAI releases Grok-1, a 314B MoE'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog: ', 'raw': 'Blog: '}, {'type': 'link', 'href': 'https://x.ai/blog/grok-os', 'raw': 'https://x.ai/blog/grok-os'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GH repo: ', 'raw': 'GH repo: '}, {'type': 'link', 'href': 'https://github.com/xai-org/grok-1', 'raw': 'https://github.com/xai-org/grok-1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'xai-org/grok-1'}, 'url': 'https://hf.co/xai-org/grok-1', 'raw': 'https://hf.co/xai-org/grok-1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🕺MusicLang, a model for controllable music generation', 'raw': '🕺MusicLang, a model for controllable music generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'musiclang/musiclang-predict'}, 'url': 'https://hf.co/spaces/musiclang/musiclang-predict', 'raw': 'https://hf.co/spaces/musiclang/musiclang-predict'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GH repo: ', 'raw': 'GH repo: '}, {'type': 'link', 'href': 'https://github.com/musiclang/musiclang_predict', 'raw': 'https://github.com/musiclang/musiclang_predict'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔬BioT5: a family of models for biology and chemical text tasks', 'raw': '🔬BioT5: a family of models for biology and chemical text tasks'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Base model: ', 'raw': 'Base model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'QizhiPei/biot5-base'}, 'url': 'https://hf.co/QizhiPei/biot5-base', 'raw': 'https://hf.co/QizhiPei/biot5-base'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model for molecule captioning and design: ', 'raw': 'Model for molecule captioning and design: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'QizhiPei/biot5-base-mol2text'}, 'url': 'https://hf.co/QizhiPei/biot5-base-mol2text', 'raw': 'https://hf.co/QizhiPei/biot5-base-mol2text'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'QizhiPei/biot5-base-text2mol'}, 'url': 'https://hf.co/QizhiPei/biot5-base-text2mol', 'raw': 'https://hf.co/QizhiPei/biot5-base-text2mol'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GH Repo: ', 'raw': 'GH Repo: '}, {'type': 'link', 'href': 'https://github.com/QizhiPei/BioT5', 'raw': 'https://github.com/QizhiPei/BioT5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2310.07276'}, 'url': 'https://hf.co/papers/2310.07276', 'raw': 'https://hf.co/papers/2310.07276', 'label': 'BioT5: Enriching Cross-modal Integration in Biology with Chemical\n Knowledge and Natural Language Associations (2310.07276)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤏Check out the AQLM and QMoE official weights from ISTA-DAS lab', 'raw': '🤏Check out the AQLM and QMoE official weights from ISTA-DAS lab'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Org: ', 'raw': 'Org: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'ISTA-DASLab'}, 'url': 'https://hf.co/ISTA-DASLab', 'raw': 'https://hf.co/ISTA-DASLab', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/628e0ce4e53bbd334577fcb0/TRPtgtSavYjDJOK3S1I8M.png'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Papers: ', 'raw': 'Papers: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.06118'}, 'url': 'https://hf.co/papers/2401.06118', 'raw': 'https://hf.co/papers/2401.06118', 'label': 'Extreme Compression of Large Language Models via Additive Quantization (2401.06118)'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2310.16795'}, 'url': 'https://hf.co/papers/2310.16795', 'raw': 'https://hf.co/papers/2310.16795', 'label': 'QMoE: Practical Sub-1-Bit Compression of Trillion-Parameter Models (2310.16795)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀Community releases', 'raw': '🚀Community releases'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Einstein-v4-7B, a Mistral fine-tune on high-quality data ', 'raw': 'Einstein-v4-7B, a Mistral fine-tune on high-quality data '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Weyaxi/Einstein-v4-7B'}, 'url': 'https://hf.co/Weyaxi/Einstein-v4-7B', 'raw': 'https://hf.co/Weyaxi/Einstein-v4-7B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'IL-7B, a Mistral fine-tune merge for rheumatology ', 'raw': 'IL-7B, a Mistral fine-tune merge for rheumatology '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'cmcmaster/il_7b'}, 'url': 'https://hf.co/cmcmaster/il_7b', 'raw': 'https://hf.co/cmcmaster/il_7b'}, {'type': 
'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' Caselaw Access Project, a collaboration to digitalize 40 million US court decisions from 6.7 million cases from 360 years ', 'raw': ' Caselaw Access Project, a collaboration to digitalize 40 million US court decisions from 6.7 million cases from 360 years '}, {'type': 'link', 'href': 'https://hf.co/datasets/TeraflopAI/Caselaw_Access_Project', 'raw': 'https://hf.co/datasets/TeraflopAI/Caselaw_Access_Project'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌍Data and models around the world', 'raw': '🌍Data and models around the world'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'HPLT Monolingual, a dataset of 75 languages with over 40TB of data ', 'raw': 'HPLT Monolingual, a dataset of 75 languages with over 40TB of data '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HPLT/hplt_monolingual_v1_2'}, 'url': 'https://hf.co/datasets/HPLT/hplt_monolingual_v1_2', 'raw': 'https://hf.co/datasets/HPLT/hplt_monolingual_v1_2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'OpenLLM Turkish Benchmarks & Leaderboard ', 'raw': 'OpenLLM Turkish Benchmarks & Leaderboard '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'malhajar/openllmturkishleadboard-datasets-65e5854490a87c0f2670ec18'}, 'url': 'https://hf.co/collections/malhajar/openllmturkishleadboard-datasets-65e5854490a87c0f2670ec18', 'raw': 'https://hf.co/collections/malhajar/openllmturkishleadboard-datasets-65e5854490a87c0f2670ec18'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'malhajar/OpenLLMTurkishLeaderboard'}, 'url': 'https://hf.co/spaces/malhajar/OpenLLMTurkishLeaderboard', 'raw': 'https://hf.co/spaces/malhajar/OpenLLMTurkishLeaderboard'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Occiglot, a collaborative effort for European LLMs with an initial release of 7B models for French, German, Spanish, and Italian ', 'raw': 'Occiglot, a collaborative effort for European LLMs with an initial release of 7B models for French, German, Spanish, and Italian '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'occiglot/occiglot-eu5-7b-v01-65dbed502a6348b052695e01'}, 'url': 'https://hf.co/collections/occiglot/occiglot-eu5-7b-v01-65dbed502a6348b052695e01', 'raw': 'https://hf.co/collections/occiglot/occiglot-eu5-7b-v01-65dbed502a6348b052695e01'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Guftagoo, a Hindi+Hinglish multi-turn conversational dataset ', 'raw': 'Guftagoo, a Hindi+Hinglish multi-turn conversational dataset '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Tensoic/gooftagoo'}, 'url': 'https://hf.co/datasets/Tensoic/gooftagoo', 'raw': 'https://hf.co/datasets/Tensoic/gooftagoo'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AryaBhatta-Orca-Maths-Hindi dataset ', 'raw': 'AryaBhatta-Orca-Maths-Hindi dataset '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'GenVRadmin/Aryabhatta-Orca-Maths-Hindi'}, 'url': 'https://hf.co/datasets/GenVRadmin/Aryabhatta-Orca-Maths-Hindi', 'raw': 'https://hf.co/datasets/GenVRadmin/Aryabhatta-Orca-Maths-Hindi'}]","Diaries of Open Source. Part 6! 
+
+🏎️xAI releases Grok-1, a 314B MoE
+Blog: https://x.ai/blog/grok-os
+GH repo: https://github.com/xai-org/grok-1
+Model: https://hf.co/xai-org/grok-1
+
+🕺MusicLang, a model for controllable music generation
+Demo: https://hf.co/spaces/musiclang/musiclang-predict
+GH repo: https://github.com/musiclang/musiclang_predict
+
+🔬BioT5: a family of models for biology and chemical text tasks
+Base model: https://hf.co/QizhiPei/biot5-base
+Model for molecule captioning and design: https://hf.co/QizhiPei/biot5-base-mol2text and https://hf.co/QizhiPei/biot5-base-text2mol
+GH Repo: https://github.com/QizhiPei/BioT5
+Paper: https://hf.co/papers/2310.07276
+
+🤏Check out the AQLM and QMoE official weights from ISTA-DAS lab
+Org: https://hf.co/ISTA-DASLab
+Papers: https://hf.co/papers/2401.06118 and https://hf.co/papers/2310.16795
+
+🚀Community releases
+Einstein-v4-7B, a Mistral fine-tune on high-quality data https://hf.co/Weyaxi/Einstein-v4-7B
+IL-7B, a Mistral fine-tune merge for rheumatology https://hf.co/cmcmaster/il_7b
+ Caselaw Access Project, a collaboration to digitalize 40 million US court decisions from 6.7 million cases from 360 years https://hf.co/datasets/TeraflopAI/Caselaw_Access_Project
+
+🌍Data and models around the world
+HPLT Monolingual, a dataset of 75 languages with over 40TB of data https://hf.co/datasets/HPLT/hplt_monolingual_v1_2
+OpenLLM Turkish Benchmarks & Leaderboard https://hf.co/collections/malhajar/openllmturkishleadboard-datasets-65e5854490a87c0f2670ec18 and https://hf.co/spaces/malhajar/OpenLLMTurkishLeaderboard
+Occiglot, a collaborative effort for European LLMs with an initial release of 7B models for French, German, Spanish, and Italian https://hf.co/collections/occiglot/occiglot-eu5-7b-v01-65dbed502a6348b052695e01
+Guftagoo, a Hindi+Hinglish multi-turn conversational dataset https://hf.co/datasets/Tensoic/gooftagoo
+AryaBhatta-Orca-Maths-Hindi dataset https://hf.co/datasets/GenVRadmin/Aryabhatta-Orca-Maths-Hindi",[],[],"[{'reaction': '👍', 'users': ['ajibawa-2023', 'YaTharThShaRma999', 'normanschizogh', 'samusenps', 'Euclid-Jie', 'lunarflu', 'Priceva', 'Andron00e', 'not-lain', 'vapuck'], 'count': 10}, {'reaction': '❤️', 'users': ['lunarflu', 'not-lain'], 'count': 2}, {'reaction': '🤗', 'users': ['lunarflu', 'not-lain'], 'count': 2}]",2024-03-19 16:23:57,2024-03-19 16:34:06.410,"[{'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}]",/posts/osanseviero/585071145791233,1944,,1
+https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,497093114781728,"[{'type': 'text', 'value': 'Fast High-Resolution Image Synthesis with Latent Adversarial Diffusion Distillation', 'raw': 'Fast High-Resolution Image Synthesis with Latent Adversarial Diffusion Distillation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.12015'}, 'url': 'https://huggingface.co/papers/2403.12015', 'raw': 'https://huggingface.co/papers/2403.12015', 'label': 'Fast High-Resolution Image Synthesis with Latent Adversarial Diffusion\n Distillation (2403.12015)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 
'value': ""Diffusion models are the main driver of progress in image and video synthesis, but suffer from slow inference speed. Distillation methods, like the recently introduced adversarial diffusion distillation (ADD) aim to shift the model from many-shot to single-step inference, albeit at the cost of expensive and difficult optimization due to its reliance on a fixed pretrained DINOv2 discriminator. We introduce Latent Adversarial Diffusion Distillation (LADD), a novel distillation approach overcoming the limitations of ADD. In contrast to pixel-based ADD, LADD utilizes generative features from pretrained latent diffusion models. This approach simplifies training and enhances performance, enabling high-resolution multi-aspect ratio image synthesis. We apply LADD to Stable Diffusion 3 (8B) to obtain SD3-Turbo, a fast model that matches the performance of state-of-the-art text-to-image generators using only four unguided sampling steps. Moreover, we systematically investigate its scaling behavior and demonstrate LADD's effectiveness in various applications such as image editing and inpainting."", 'raw': ""Diffusion models are the main driver of progress in image and video synthesis, but suffer from slow inference speed. Distillation methods, like the recently introduced adversarial diffusion distillation (ADD) aim to shift the model from many-shot to single-step inference, albeit at the cost of expensive and difficult optimization due to its reliance on a fixed pretrained DINOv2 discriminator. We introduce Latent Adversarial Diffusion Distillation (LADD), a novel distillation approach overcoming the limitations of ADD. In contrast to pixel-based ADD, LADD utilizes generative features from pretrained latent diffusion models. This approach simplifies training and enhances performance, enabling high-resolution multi-aspect ratio image synthesis. We apply LADD to Stable Diffusion 3 (8B) to obtain SD3-Turbo, a fast model that matches the performance of state-of-the-art text-to-image generators using only four unguided sampling steps. Moreover, we systematically investigate its scaling behavior and demonstrate LADD's effectiveness in various applications such as image editing and inpainting.""}, {'type': 'new_line', 'raw': '\n'}]","Fast High-Resolution Image Synthesis with Latent Adversarial Diffusion Distillation + +https://huggingface.co/papers/2403.12015 + +Diffusion models are the main driver of progress in image and video synthesis, but suffer from slow inference speed. Distillation methods, like the recently introduced adversarial diffusion distillation (ADD) aim to shift the model from many-shot to single-step inference, albeit at the cost of expensive and difficult optimization due to its reliance on a fixed pretrained DINOv2 discriminator. We introduce Latent Adversarial Diffusion Distillation (LADD), a novel distillation approach overcoming the limitations of ADD. In contrast to pixel-based ADD, LADD utilizes generative features from pretrained latent diffusion models. This approach simplifies training and enhances performance, enabling high-resolution multi-aspect ratio image synthesis. We apply LADD to Stable Diffusion 3 (8B) to obtain SD3-Turbo, a fast model that matches the performance of state-of-the-art text-to-image generators using only four unguided sampling steps. Moreover, we systematically investigate its scaling behavior and demonstrate LADD's effectiveness in various applications such as image editing and inpainting. 
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/_Z6ZDKQqw_qEWnjt3fD6z.png'}]",[],"[{'reaction': '❤️', 'users': ['clem', 'JayMokoena', 'samusenps', 'krzysztofpapciak'], 'count': 4}, {'reaction': '🔥', 'users': ['mathiasn1', 'taohu'], 'count': 2}]",2024-03-19 14:28:23,2024-03-19 14:28:23.356,[],/posts/akhaliq/497093114781728,2236,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62e54f0eae9d3f10acb95cb9/VAyk05hqB3OZWXEZW-B0q.png,2157.0,mrfakename,mrfakename,680129047887393,"[{'type': 'text', 'value': ""Today, I'm excited to launch two new models on the TTS Arena: MeloTTS and StyleTTS 2. Both are open sourced, permissively licensed, and highly efficient."", 'raw': ""Today, I'm excited to launch two new models on the TTS Arena: MeloTTS and StyleTTS 2. Both are open sourced, permissively licensed, and highly efficient.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Curious to see how they compare with other leading models? Vote on the TTS Arena ⬇️', 'raw': 'Curious to see how they compare with other leading models? Vote on the TTS Arena ⬇️'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'TTS-AGI/TTS-Arena'}, 'url': 'https://huggingface.co/spaces/TTS-AGI/TTS-Arena', 'raw': 'https://huggingface.co/spaces/TTS-AGI/TTS-Arena'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MeloTTS, released by MyShell AI, provides realistic and lifelike text to speech while remaining efficient and fast, even when running on CPU. It supports a variety of languages, including but not limited to English, French, Chinese, and Japanese.', 'raw': 'MeloTTS, released by MyShell AI, provides realistic and lifelike text to speech while remaining efficient and fast, even when running on CPU. It supports a variety of languages, including but not limited to English, French, Chinese, and Japanese.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""StyleTTS 2 is another fully open sourced text to speech framework. It's permissively licensed, highly-efficient, and supports voice cloning and longform narration. It also provides natural and lifelike speech."", 'raw': ""StyleTTS 2 is another fully open sourced text to speech framework. It's permissively licensed, highly-efficient, and supports voice cloning and longform narration. It also provides natural and lifelike speech.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Both are available now to try on the TTS Arena - vote to find which one is better! The leaderboard will be revealed once we collect enough votes.', 'raw': 'Both are available now to try on the TTS Arena - vote to find which one is better! The leaderboard will be revealed once we collect enough votes.'}]","Today, I'm excited to launch two new models on the TTS Arena: MeloTTS and StyleTTS 2. Both are open sourced, permissively licensed, and highly efficient. + +Curious to see how they compare with other leading models? Vote on the TTS Arena ⬇️ + +https://huggingface.co/spaces/TTS-AGI/TTS-Arena + +MeloTTS, released by MyShell AI, provides realistic and lifelike text to speech while remaining efficient and fast, even when running on CPU. 
It supports a variety of languages, including but not limited to English, French, Chinese, and Japanese. + +StyleTTS 2 is another fully open sourced text to speech framework. It's permissively licensed, highly-efficient, and supports voice cloning and longform narration. It also provides natural and lifelike speech. + +Both are available now to try on the TTS Arena - vote to find which one is better! The leaderboard will be revealed once we collect enough votes.",[],[],"[{'reaction': '🔥', 'users': ['Kukedlc', 'samusenps', 'osanseviero', 'theamazinceo', 'DmitryRyumin', 'AARon99', 'taufiqdp', 'victor', 'clem', 'diwank', 'jbilcke-hf', 'Hyperionllama', 'zironycho', 'pierrci', 'mirellyssl'], 'count': 15}]",2024-03-19 00:45:37,2024-11-26 23:34:29.323,"[{'_id': '66d1fa61ad293ffc4b1d035b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/66d1fa61ad293ffc4b1d035b/DQ2w7UUN-dPnIdmpjpgYs.png', 'fullname': 'Patrick levy-Rosenthal', 'name': 'metasoulone', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '62e54f0eae9d3f10acb95cb9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62e54f0eae9d3f10acb95cb9/VAyk05hqB3OZWXEZW-B0q.png', 'fullname': 'mrfakename', 'name': 'mrfakename', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2157, 'isFollowing': False}]",/posts/mrfakename/680129047887393,4262,,14 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857746553-5df7e9e5da6d0311fd3d53f9.jpeg,1173.0,Thomas Wolf,thomwolf,536867538229257,"[{'type': 'text', 'value': 'so what was your favorite or most surprising announcement from GTC 2024?', 'raw': 'so what was your favorite or most surprising announcement from GTC 2024?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.youtube.com/live/Y2F8yisiS6E?si=shoZxQMHo_TTptCg', 'raw': 'https://www.youtube.com/live/Y2F8yisiS6E?si=shoZxQMHo_TTptCg'}]","so what was your favorite or most surprising announcement from GTC 2024? 
+https://www.youtube.com/live/Y2F8yisiS6E?si=shoZxQMHo_TTptCg",[],[],"[{'reaction': '👀', 'users': ['clem', 'mishig', 'julien-c', 'OmbelineM'], 'count': 4}]",2024-03-18 23:24:08,2024-03-19 13:35:16.429,"[{'_id': '6538119803519fddb4a17e10', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}, {'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}]",/posts/thomwolf/536867538229257,2881,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,187303171643734,"[{'type': 'text', 'value': 'Uni-SMART', 'raw': 'Uni-SMART'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Universal Science Multimodal Analysis and Research Transformer', 'raw': 'Universal Science Multimodal Analysis and Research Transformer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.10301'}, 'url': 'https://huggingface.co/papers/2403.10301', 'raw': 'https://huggingface.co/papers/2403.10301', 'label': 'Uni-SMART: Universal Science Multimodal Analysis and Research\n Transformer (2403.10301)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In scientific research and its application, scientific literature analysis is crucial as it allows researchers to build on the work of others. However, the fast growth of scientific knowledge has led to a massive increase in scholarly articles, making in-depth literature analysis increasingly challenging and time-consuming. The emergence of Large Language Models (LLMs) has offered a new way to address this challenge. Known for their strong abilities in summarizing texts, LLMs are seen as a potential tool to improve the analysis of scientific literature. However, existing LLMs have their own limits. Scientific literature often includes a wide range of multimodal elements, such as molecular structure, tables, and charts, which are hard for text-focused LLMs to understand and analyze. This issue points to the urgent need for new solutions that can fully understand and analyze multimodal content in scientific literature. To answer this demand, we present Uni-SMART (Universal Science Multimodal Analysis and Research Transformer), an innovative model designed for in-depth understanding of multimodal scientific literature. Through rigorous quantitative evaluation across several domains, Uni-SMART demonstrates superior performance over leading text-focused LLMs. Furthermore, our exploration extends to practical applications, including patent infringement detection and nuanced analysis of charts. These applications not only highlight Uni-SMART's adaptability but also its potential to revolutionize how we interact with scientific literature."", 'raw': ""In scientific research and its application, scientific literature analysis is crucial as it allows researchers to build on the work of others. 
However, the fast growth of scientific knowledge has led to a massive increase in scholarly articles, making in-depth literature analysis increasingly challenging and time-consuming. The emergence of Large Language Models (LLMs) has offered a new way to address this challenge. Known for their strong abilities in summarizing texts, LLMs are seen as a potential tool to improve the analysis of scientific literature. However, existing LLMs have their own limits. Scientific literature often includes a wide range of multimodal elements, such as molecular structure, tables, and charts, which are hard for text-focused LLMs to understand and analyze. This issue points to the urgent need for new solutions that can fully understand and analyze multimodal content in scientific literature. To answer this demand, we present Uni-SMART (Universal Science Multimodal Analysis and Research Transformer), an innovative model designed for in-depth understanding of multimodal scientific literature. Through rigorous quantitative evaluation across several domains, Uni-SMART demonstrates superior performance over leading text-focused LLMs. Furthermore, our exploration extends to practical applications, including patent infringement detection and nuanced analysis of charts. These applications not only highlight Uni-SMART's adaptability but also its potential to revolutionize how we interact with scientific literature.""}, {'type': 'new_line', 'raw': '\n'}]","Uni-SMART + +Universal Science Multimodal Analysis and Research Transformer + +https://huggingface.co/papers/2403.10301 + +In scientific research and its application, scientific literature analysis is crucial as it allows researchers to build on the work of others. However, the fast growth of scientific knowledge has led to a massive increase in scholarly articles, making in-depth literature analysis increasingly challenging and time-consuming. The emergence of Large Language Models (LLMs) has offered a new way to address this challenge. Known for their strong abilities in summarizing texts, LLMs are seen as a potential tool to improve the analysis of scientific literature. However, existing LLMs have their own limits. Scientific literature often includes a wide range of multimodal elements, such as molecular structure, tables, and charts, which are hard for text-focused LLMs to understand and analyze. This issue points to the urgent need for new solutions that can fully understand and analyze multimodal content in scientific literature. To answer this demand, we present Uni-SMART (Universal Science Multimodal Analysis and Research Transformer), an innovative model designed for in-depth understanding of multimodal scientific literature. Through rigorous quantitative evaluation across several domains, Uni-SMART demonstrates superior performance over leading text-focused LLMs. Furthermore, our exploration extends to practical applications, including patent infringement detection and nuanced analysis of charts. These applications not only highlight Uni-SMART's adaptability but also its potential to revolutionize how we interact with scientific literature. 
+","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/GzeIHBeQ5eva5pJamjMT2.png'}]",[],"[{'reaction': '👍', 'users': ['samusenps', 'muhtasham', 'osanseviero', 'hypnopump', 'ajibawa-2023', 'clem', 'AtAndDev'], 'count': 7}, {'reaction': '🚀', 'users': ['samusenps', 'muhtasham', 'hypnopump', 'clem', 'AtAndDev'], 'count': 5}]",2024-03-18 16:59:38,2024-03-18 16:59:38.188,[],/posts/akhaliq/187303171643734,3267,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,770487932075465,"[{'type': 'text', 'value': 'Our 🐑 PECoRe 🐑 method to detect & attribute context usage in LM generations finally has an official Gradio demo! 🚀', 'raw': 'Our 🐑 PECoRe 🐑 method to detect & attribute context usage in LM generations finally has an official Gradio demo! 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'gsarti/pecore'}, 'url': 'https://huggingface.co/spaces/gsarti/pecore', 'raw': 'https://huggingface.co/spaces/gsarti/pecore'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Highlights:', 'raw': 'Highlights:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Context attribution for several decoder-only and encoder-decoder models using convenient presets', 'raw': '🔍 Context attribution for several decoder-only and encoder-decoder models using convenient presets'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Uses only LM internals to faithfully reflect context usage, no additional detector involved', 'raw': '🔍 Uses only LM internals to faithfully reflect context usage, no additional detector involved'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Highly parametrizable, export Python & Shell code snippets to run on your machine using 🐛 Inseq CLI (', 'raw': '🔍 Highly parametrizable, export Python & Shell code snippets to run on your machine using 🐛 Inseq CLI ('}, {'type': 'link', 'href': 'https://github.com/inseq-team/inseq', 'raw': 'https://github.com/inseq-team/inseq'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Want to use PECoRe for your LMs? Feedback and comments are welcome! 🤗', 'raw': 'Want to use PECoRe for your LMs? Feedback and comments are welcome! 🤗'}, {'type': 'new_line', 'raw': '\n'}]","Our 🐑 PECoRe 🐑 method to detect & attribute context usage in LM generations finally has an official Gradio demo! 🚀 + +https://huggingface.co/spaces/gsarti/pecore + +Highlights: +🔍 Context attribution for several decoder-only and encoder-decoder models using convenient presets +🔍 Uses only LM internals to faithfully reflect context usage, no additional detector involved +🔍 Highly parametrizable, export Python & Shell code snippets to run on your machine using 🐛 Inseq CLI (https://github.com/inseq-team/inseq) + +Want to use PECoRe for your LMs? Feedback and comments are welcome! 
🤗 +",[],[],"[{'reaction': '❤️', 'users': ['samusenps', 'muhtasham', 'clem', 'louisbrulenaudet', 'santiviquez'], 'count': 5}]",2024-03-18 14:45:58,2024-03-21 09:07:44.600,"[{'_id': '5e7749883d77a72421292d07', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg', 'fullname': 'Gabriele Sarti', 'name': 'gsarti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 226, 'isFollowing': False}, {'_id': '629a173153a72d997d3f57d0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg', 'fullname': 'Santiago Viquez', 'name': 'santiviquez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 86, 'isFollowing': False}]",/posts/gsarti/770487932075465,2219,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,399329903001758,"[{'type': 'text', 'value': '🚀🕺🌟 New Research Alert - CVPR 2024! 🌟 💃🏻🚀', 'raw': '🚀🕺🌟 New Research Alert - CVPR 2024! 🌟 💃🏻🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: NECA: Neural Customizable Human Avatar 🌟🚀', 'raw': '📄 Title: NECA: Neural Customizable Human Avatar 🌟🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: The NECA paper presents a novel method for creating customizable human avatars from video, allowing detailed manipulation of pose, shadow, shape, lighting, and texture for realistic rendering and editing.', 'raw': '📝 Description: The NECA paper presents a novel method for creating customizable human avatars from video, allowing detailed manipulation of pose, shadow, shape, lighting, and texture for realistic rendering and editing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Junjin Xiao, Qing Zhang, Zhan Xu, and Wei-Shi Zheng', 'raw': '👥 Authors: Junjin Xiao, Qing Zhang, Zhan Xu, and Wei-Shi Zheng'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸', 'raw': '📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.10335'}, 'url': 'https://huggingface.co/papers/2403.10335', 'raw': 'https://huggingface.co/papers/2403.10335', 'label': 'NECA: Neural Customizable Human Avatar (2403.10335)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Repository: ', 'raw': '📁 Repository: '}, {'type': 'link', 'href': 'https://github.com/iSEE-Laboratory/NECA', 'raw': 'https://github.com/iSEE-Laboratory/NECA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 
'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #NECA #AvatarCustomization #RealisticRendering #HumanRepresentation #CVPR2024 #DeepLearning #Animation #Innovation', 'raw': '🔍 Keywords: #NECA #AvatarCustomization #RealisticRendering #HumanRepresentation #CVPR2024 #DeepLearning #Animation #Innovation'}]","🚀🕺🌟 New Research Alert - CVPR 2024! 🌟 💃🏻🚀 +📄 Title: NECA: Neural Customizable Human Avatar 🌟🚀 + +📝 Description: The NECA paper presents a novel method for creating customizable human avatars from video, allowing detailed manipulation of pose, shadow, shape, lighting, and texture for realistic rendering and editing. + +👥 Authors: Junjin Xiao, Qing Zhang, Zhan Xu, and Wei-Shi Zheng + +📅 Conference: CVPR, Jun 17-21, 2024 | Seattle WA, USA 🇺🇸 + +🔗 Paper: https://huggingface.co/papers/2403.10335 + +📁 Repository: https://github.com/iSEE-Laboratory/NECA + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +🔍 Keywords: #NECA #AvatarCustomization #RealisticRendering #HumanRepresentation #CVPR2024 #DeepLearning #Animation #Innovation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/vQPBLyXTQ4KSflLT8qw9_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/N9MR8DEIIHfzndChRVXvT.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/qDg0JxXyBKSIolQIko31J.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/my2nrsK4N12kk7KrJShBC.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/UD95hXxppcFrz6eIqMlOB.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}]","[{'reaction': '👍', 'users': ['DmitryRyumin', 'osanseviero', 'victor', 'samusenps', 'clem', 'nvspavankalyanch', 'mexicanamerican'], 'count': 7}, {'reaction': '🔥', 'users': ['nvspavankalyanch'], 'count': 1}]",2024-03-18 10:42:54,2024-03-18 10:42:54.114,[],/posts/DmitryRyumin/399329903001758,1927,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg,211.0,Bram Vanroy,BramVanroy,792998889340102,"[{'type': 'text', 'value': '🖴 The HPLT monolingual dataset has a new home!', 'raw': '🖴 The HPLT monolingual 
dataset has a new home!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""After being in touch with HPLT folks, I've transferred the data to their org. That only makes sense. You can find it below."", 'raw': ""After being in touch with HPLT folks, I've transferred the data to their org. That only makes sense. You can find it below.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'HPLT/hplt_monolingual_v1_2'}, 'url': 'https://huggingface.co/datasets/HPLT/hplt_monolingual_v1_2', 'raw': 'https://huggingface.co/datasets/HPLT/hplt_monolingual_v1_2'}]","🖴 The HPLT monolingual dataset has a new home! + +After being in touch with HPLT folks, I've transferred the data to their org. That only makes sense. You can find it below. + +https://huggingface.co/datasets/HPLT/hplt_monolingual_v1_2",[],[],"[{'reaction': '🚀', 'users': ['osanseviero', 'clem'], 'count': 2}]",2024-03-18 09:43:02,2024-09-28 22:09:06.471,"[{'_id': '5e6a3d4ea9afd5125d9ec064', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1584020801691-noauth.jpeg', 'fullname': 'Stefan Schweter', 'name': 'stefan-it', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2805, 'isFollowing': False}, {'_id': '5e1e17b6fcf41d740b6996a8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg', 'fullname': 'Bram Vanroy', 'name': 'BramVanroy', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 211, 'isFollowing': False}, {'_id': '6147363543eb04c443cd4e39', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6147363543eb04c443cd4e39/Ofw2_zBsPPpj1LovQep0L.jpeg', 'fullname': 'Meliksah Turker', 'name': 'meliksahturker', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9, 'isFollowing': False}]",/posts/BramVanroy/792998889340102,1731,,3
https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg,3938.0,chansung park,chansung,716968829982789,"[{'type': 'text', 'value': '🎥 🤾 Vid2Persona: talk to a person from a video clip', 'raw': '🎥 🤾 Vid2Persona: talk to a person from a video clip'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A fun project over the last week with ', 'raw': 'A fun project over the last week with '}, {'type': 'mention', 'user': 'sayakpaul', 'raw': '@sayakpaul'}, {'type': 'text', 'value': '. It has a simple pipeline from extracting traits of video characters to chatting with them.', 'raw': '. It has a simple pipeline from extracting traits of video characters to chatting with them.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Under the hood, this project leverages the power of both commercial and open source models. We used Google's Gemini 1.0 Pro Vision model to understand the video content directly, then we used HuggingFaceH4/zephyr-7b-beta model to make conversation!"", 'raw': ""Under the hood, this project leverages the power of both commercial and open source models.
We used Google's Gemini 1.0 Pro Vision model to understand the video content directly, then we used HuggingFaceH4/zephyr-7b-beta model to make conversation!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it on the Hugging Face Space and let us know what you think.', 'raw': 'Try it on the Hugging Face Space and let us know what you think.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ': ', 'raw': ': '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'chansung/vid2persona'}, 'url': 'https://huggingface.co/spaces/chansung/vid2persona', 'raw': 'https://huggingface.co/spaces/chansung/vid2persona'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The space application is a dedicated implementation for ZeroGPU environment + Hugging Face Inference API with PRO account. If you wish to host it on your own environment, consider duplicating the space or running locally with the project repository', 'raw': 'The space application is a dedicated implementation for ZeroGPU environment + Hugging Face Inference API with PRO account. If you wish to host it on your own environment, consider duplicating the space or running locally with the project repository'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ': ', 'raw': ': '}, {'type': 'link', 'href': 'https://github.com/deep-diver/Vid2Persona', 'raw': 'https://github.com/deep-diver/Vid2Persona'}]","🎥 🤾 Vid2Persona: talk to a person from a video clip + +A fun project over the last week with @sayakpaul. It has a simple pipeline from extracting traits of video characters to chatting with them. + +Under the hood, this project leverages the power of both commercial and open source models. We used Google's Gemini 1.0 Pro Vision model to understand the video content directly, then we used HuggingFaceH4/zephyr-7b-beta model to make conversation! + +Try it on the Hugging Face Space and let us know what you think. +: https://huggingface.co/spaces/chansung/vid2persona + +The space application is a dedicated implementation for ZeroGPU environment + Hugging Face Inference API with PRO account. 
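The two-stage Vid2Persona pipeline described above (a vision LLM extracts the persona, an open chat model role-plays it) can be approximated with public SDKs. This is a hedged sketch of the flow, not the Space's actual code; the frame filenames, prompts, and persona wiring are assumptions:

```python
import google.generativeai as genai
from huggingface_hub import InferenceClient
from PIL import Image

genai.configure(api_key="YOUR_GOOGLE_API_KEY")  # assumption: your own key

# Stage 1: derive a persona from sampled video frames (filenames assumed).
vision = genai.GenerativeModel("gemini-pro-vision")
frames = [Image.open(f"frame_{i}.png") for i in range(3)]
persona = vision.generate_content(
    ["Describe this character's personality traits in one paragraph.", *frames]
).text

# Stage 2: chat as that persona with the open zephyr model via the HF API.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
prompt = (
    f"<|system|>\nYou are this character: {persona}</s>\n"
    "<|user|>\nHi, who are you?</s>\n<|assistant|>\n"
)
print(client.text_generation(prompt, max_new_tokens=200))
```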
If you wish to host it on your own environment, consider duplicating the space or running locally with the project repository +: https://github.com/deep-diver/Vid2Persona",[],"[{'_id': '5f7fbd813e94f16a85448745', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1649681653581-5f7fbd813e94f16a85448745.jpeg', 'fullname': 'Sayak Paul', 'name': 'sayakpaul', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 657}]","[{'reaction': '🤗', 'users': ['chansung', 'anonyme789', 'Chan-Y', 'victor', 'osanseviero', 'xprilion'], 'count': 6}, {'reaction': '🤯', 'users': ['anonyme789', 'Srulikbd', 'osanseviero', 'xprilion'], 'count': 4}]",2024-03-12 00:54:34,2024-03-12 00:54:34.113,[],/posts/chansung/716968829982789,830,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/1594311341799-5f07383b19cb630495b812cd.jpeg,107.0,Stas Bekman,stas,984424866637646,"[{'type': 'text', 'value': ""If you're trying to run MoE Mixtral-8x7b under DeepSpeed w/ HF Transformers it's likely to hang on the first forward."", 'raw': ""If you're trying to run MoE Mixtral-8x7b under DeepSpeed w/ HF Transformers it's likely to hang on the first forward.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The solution is here ', 'raw': 'The solution is here '}, {'type': 'link', 'href': 'https://github.com/microsoft/DeepSpeed/pull/4966?_x_tr_sl=auto&_x_tr_tl=en&_x_tr_hl=en-US#issuecomment-1989671378', 'raw': 'https://github.com/microsoft/DeepSpeed/pull/4966?_x_tr_sl=auto&_x_tr_tl=en&_x_tr_hl=en-US#issuecomment-1989671378'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and you need deepspeed>=0.13.0', 'raw': 'and you need deepspeed>=0.13.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks to Masahiro Tanaka for the fix.', 'raw': 'Thanks to Masahiro Tanaka for the fix.'}]","If you're trying to run MoE Mixtral-8x7b under DeepSpeed w/ HF Transformers it's likely to hang on the first forward. + +The solution is here https://github.com/microsoft/DeepSpeed/pull/4966?_x_tr_sl=auto&_x_tr_tl=en&_x_tr_hl=en-US#issuecomment-1989671378 + +and you need deepspeed>=0.13.0 + +Thanks to Masahiro Tanaka for the fix.",[],[],"[{'reaction': '👍', 'users': ['nss-ysasaki', 'sitloboi2012', 'theainerd', 'osanseviero', 'victor', 'muhtasham', 'hangzhang-nlp'], 'count': 7}]",2024-03-12 00:14:41,2024-03-12 00:15:22.852,[],/posts/stas/984424866637646,581,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,188560282332993,"[{'type': 'text', 'value': '🚀🕺🌟 New Research Alert - AAAI 2024! 
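A quick guard for the DeepSpeed requirement in the stas post above; the 0.13.0 bound comes from the post itself, and the check is generic:

```python
import deepspeed
from packaging import version

# MoE models like Mixtral-8x7b need the fix released in deepspeed 0.13.0,
# otherwise training hangs on the first forward pass.
assert version.parse(deepspeed.__version__) >= version.parse("0.13.0"), (
    "Upgrade with: pip install 'deepspeed>=0.13.0'"
)
```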
🌟💃🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: Relightable and Animatable Neural Avatars from Videos 🌟🚀', 'raw': '📄 Title: Relightable and Animatable Neural Avatars from Videos 🌟🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝 Description: Relightable & animatable neural avatars from sparse videos.', 'raw': '📝 Description: Relightable & animatable neural avatars from sparse videos.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Wenbin Lin, Chengwei Zheng, Jun-Hai Yong, and Feng Xu', 'raw': '👥 Authors: Wenbin Lin, Chengwei Zheng, Jun-Hai Yong, and Feng Xu'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: AAAI, February 20-27, 2024 | Vancouver, Canada 🇨🇦', 'raw': '📅 Conference: AAAI, February 20-27, 2024 | Vancouver, Canada 🇨🇦'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2312.12877'}, 'url': 'https://huggingface.co/papers/2312.12877', 'raw': 'https://huggingface.co/papers/2312.12877', 'label': 'Relightable and Animatable Neural Avatars from Videos (2312.12877)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🌐 Github Page: ', 'raw': '🌐 Github Page: '}, {'type': 'link', 'href': 'https://wenbin-lin.github.io/RelightableAvatar-page', 'raw': 'https://wenbin-lin.github.io/RelightableAvatar-page'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📁 Repository: ', 'raw': '📁 Repository: '}, {'type': 'link', 'href': 'https://github.com/wenbin-lin/RelightableAvatar', 'raw': 'https://github.com/wenbin-lin/RelightableAvatar'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📺 Video: ', 'raw': '📺 Video: '}, {'type': 'link', 'href': 'https://www.youtube.com/watch?v=v9rlys0xQGo', 'raw': 'https://www.youtube.com/watch?v=v9rlys0xQGo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 Added to the AAAI 2024 Papers: ', 'raw': '📚 Added to the AAAI 2024 Papers: '}, {'type': 'link', 'href': 
'https://github.com/DmitryRyumin/AAAI-2024-Papers', 'raw': 'https://github.com/DmitryRyumin/AAAI-2024-Papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #NeuralAvatar #RelightableAvatars #AnimatableAvatars #3DModeling #PhotorealisticRendering #ShadowModeling #DigitalAvatars #GeometryModeling #AAAI2024 #DeepLearning #Animation #Innovation', 'raw': '🔍 Keywords: #NeuralAvatar #RelightableAvatars #AnimatableAvatars #3DModeling #PhotorealisticRendering #ShadowModeling #DigitalAvatars #GeometryModeling #AAAI2024 #DeepLearning #Animation #Innovation'}]","🚀🕺🌟 New Research Alert - AAAI 2024! 🌟💃🚀 +📄 Title: Relightable and Animatable Neural Avatars from Videos 🌟🚀 + +📝 Description: Relightable & animatable neural avatars from sparse videos. + +👥 Authors: Wenbin Lin, Chengwei Zheng, Jun-Hai Yong, and Feng Xu + +📅 Conference: AAAI, February 20-27, 2024 | Vancouver, Canada 🇨🇦 + +🔗 Paper: https://huggingface.co/papers/2312.12877 + +🌐 Github Page: https://wenbin-lin.github.io/RelightableAvatar-page +📁 Repository: https://github.com/wenbin-lin/RelightableAvatar + +📺 Video: https://www.youtube.com/watch?v=v9rlys0xQGo + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36 + +📚 Added to the AAAI 2024 Papers: https://github.com/DmitryRyumin/AAAI-2024-Papers + +🔍 Keywords: #NeuralAvatar #RelightableAvatars #AnimatableAvatars #3DModeling #PhotorealisticRendering #ShadowModeling #DigitalAvatars #GeometryModeling #AAAI2024 #DeepLearning #Animation #Innovation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/LoYx0_jpXx8VsmFxmLvD3.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/jkZid-IoTAKGZqY1airI5.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/HDhU2r7xjVO3i5VhkqYXi.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/L7_KilMltwIpDs8T1oMET.png'}]","[{'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}]","[{'reaction': '❤️', 'users': ['DmitryRyumin', 'samusenps', 'osanseviero', 'sitloboi2012', 'Lician'], 'count': 5}, {'reaction': '👍', 'users': ['samusenps', 'dashfunnydashdash'], 'count': 2}, {'reaction': '😎', 'users': ['victor'], 'count': 1}]",2024-03-11 21:05:52,2024-03-11 23:26:25.187,[],/posts/DmitryRyumin/188560282332993,112,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg,2738.0,Julien Chaumond,julien-c,400007373757162,"[{'type': 'text', 'value': '671 authors 🤯', 'raw': '671 authors 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.05530'}, 'url': 'https://huggingface.co/papers/2403.05530', 'raw': 'https://huggingface.co/papers/2403.05530', 'label': 'Gemini 1.5: Unlocking multimodal understanding 
across millions of tokens\n of context (2403.05530)'}, {'type': 'text', 'value': ' Gemini 1.5: Unlocking multimodal understanding across millions of tokens of context', 'raw': ' Gemini 1.5: Unlocking multimodal understanding across millions of tokens of context'}]","671 authors 🤯 + +https://huggingface.co/papers/2403.05530 Gemini 1.5: Unlocking multimodal understanding across millions of tokens of context",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'muhtasham', 'dvilasuero', 'pabloce', 'diwank'], 'count': 5}]",2024-03-11 17:05:07,2024-03-13 11:49:42.217,"[{'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}, {'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}]",/posts/julien-c/400007373757162,626,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,391996089229129,"[{'type': 'text', 'value': 'New foundation model on document understanding and generation in transformers 🤩', 'raw': 'New foundation model on document understanding and generation in transformers 🤩'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'UDOP by MSFT is a bleeding-edge model that is capable of many tasks, including question answering, document editing and more! 🤯', 'raw': 'UDOP by MSFT is a bleeding-edge model that is capable of many tasks, including question answering, document editing and more! 🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo 👉 ', 'raw': 'Demo 👉 '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'merve/UDOP'}, 'url': 'https://huggingface.co/spaces/merve/UDOP', 'raw': 'https://huggingface.co/spaces/merve/UDOP'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It is a model that combines vision, text and layout. 📝', 'raw': 'It is a model that combines vision, text and layout. 📝'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This model is very interesting because the input representation truly captures the nature of the document modality: text, where the text is, and the layout of the document matters!', 'raw': 'This model is very interesting because the input representation truly captures the nature of the document modality: text, where the text is, and the layout of the document matters!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""If you know T5, it resembles that: it's pre-trained on both self-supervised and supervised objectives over text, image and layout."", 'raw': ""If you know T5, it resembles that: it's pre-trained on both self-supervised and supervised objectives over text, image and layout.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To switch between tasks, one simply needs to change the task specific prompt at the beginning, e.g. for QA, one prepends with Question answering. 
', 'raw': 'To switch between tasks, one simply needs to change the task specific prompt at the beginning, e.g. for QA, one prepends with Question answering. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""As for the architecture, it's like T5, except it has a single encoder that takes in text, image and layout, and two decoders (text-layout and vision decoders) combined into one."", 'raw': ""As for the architecture, it's like T5, except it has a single encoder that takes in text, image and layout, and two decoders (text-layout and vision decoders) combined into one.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The vision decoder is a masked autoencoder (thus the capabilities of document editing).', 'raw': 'The vision decoder is a masked autoencoder (thus the capabilities of document editing).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""For me, the most interesting capability is document reconstruction, document editing and layout re-arrangement. This decoder isn't released though because it could be used maliciously to fake document editing."", 'raw': ""For me, the most interesting capability is document reconstruction, document editing and layout re-arrangement. This decoder isn't released though because it could be used maliciously to fake document editing.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Overall, the model performs very well on document understanding benchmark (DUE) and also information extraction (FUNSD, CORD) and classification (RVL-CDIP) for vision, text, layout modalities.', 'raw': 'Overall, the model performs very well on document understanding benchmark (DUE) and also information extraction (FUNSD, CORD) and classification (RVL-CDIP) for vision, text, layout modalities.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can learn more about the model from below resources (h/t to ', 'raw': 'You can learn more about the model from below resources (h/t to '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'nielsr', 'raw': '@nielsr'}, {'type': 'text', 'value': '), thanks a lot for reading 🤗', 'raw': '), thanks a lot for reading 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Docs: ', 'raw': 'Docs: '}, {'type': 'link', 'href': 'https://huggingface.co/docs/transformers/main/en/model_doc/udop', 'raw': 'https://huggingface.co/docs/transformers/main/en/model_doc/udop'}, {'type': 'text', 'value': ' 📚 ', 'raw': ' 📚 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Checkpoints: ', 'raw': 'Checkpoints: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'microsoft/udop-65e625124aee97415b88b513'}, 'url': 'https://huggingface.co/collections/microsoft/udop-65e625124aee97415b88b513', 'raw': 'https://huggingface.co/collections/microsoft/udop-65e625124aee97415b88b513'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo notebooks: ', 'raw': 'Demo notebooks: '}, {'type': 'link', 'href': 'https://github.com/NielsRogge/Transformers-Tutorials/tree/master/UDOP', 'raw': 'https://github.com/NielsRogge/Transformers-Tutorials/tree/master/UDOP'}, {'type': 'text', 'value': ' 📕', 'raw': ' 📕'}]","New foundation model on document understanding and generation in transformers 🤩 +UDOP by MSFT is a bleeding-edge model that is capable of many tasks, including question answering, document editing and more! 
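The task-prompt mechanism merve describes above (prepend a task prefix such as "Question answering.") maps to a short transformers snippet. This is a hedged sketch based on the linked docs; the `microsoft/udop-large` checkpoint and the processor's default built-in OCR are assumptions made to keep it self-contained:

```python
from PIL import Image
from transformers import UdopProcessor, UdopForConditionalGeneration

processor = UdopProcessor.from_pretrained("microsoft/udop-large")
model = UdopForConditionalGeneration.from_pretrained("microsoft/udop-large")

image = Image.open("document.png").convert("RGB")
# Switching tasks means switching the prompt prefix, e.g. question answering:
prompt = "Question answering. What is the invoice total?"

# By default the processor runs OCR to obtain words and layout boxes.
inputs = processor(images=image, text=prompt, return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=20)
print(processor.batch_decode(output_ids, skip_special_tokens=True)[0])
```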
🤯 +Demo 👉 https://huggingface.co/spaces/merve/UDOP +It is a model that combines vision, text and layout. 📝 +This model is very interesting because the input representation truly captures the nature of the document modality: text, where the text is, and the layout of the document matters! +If you know T5, it resembles that: it's pre-trained on both self-supervised and supervised objectives over text, image and layout. +To switch between tasks, one simply needs to change the task specific prompt at the beginning, e.g. for QA, one prepends with Question answering. +As for the architecture, it's like T5, except it has a single encoder that takes in text, image and layout, and two decoders (text-layout and vision decoders) combined into one. +The vision decoder is a masked autoencoder (thus the capabilities of document editing). +For me, the most interesting capability is document reconstruction, document editing and layout re-arrangement. This decoder isn't released though because it could be used maliciously to fake document editing. +Overall, the model performs very well on document understanding benchmark (DUE) and also information extraction (FUNSD, CORD) and classification (RVL-CDIP) for vision, text, layout modalities. +You can learn more about the model from below resources (h/t to +@nielsr), thanks a lot for reading 🤗 +Docs: https://huggingface.co/docs/transformers/main/en/model_doc/udop 📚 +Checkpoints: https://huggingface.co/collections/microsoft/udop-65e625124aee97415b88b513 +Demo notebooks: https://github.com/NielsRogge/Transformers-Tutorials/tree/master/UDOP 📕","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/98QyX-MW_OkbIdDZbSa3M.png'}]","[{'_id': '5f1158120c833276f61f1a84', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1608042047613-5f1158120c833276f61f1a84.jpeg', 'fullname': 'Niels Rogge', 'name': 'nielsr', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 904}]","[{'reaction': '❤️', 'users': ['samusenps', 'rajistics', 'not-lain', 'cstr', 'diwank'], 'count': 5}]",2024-03-11 16:59:02,2024-03-11 16:59:25.239,[],/posts/merve/391996089229129,520,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,263130686669707,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Information Flow Routes: Automatically Interpreting Language Models at Scale by "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Information Flow Routes: Automatically Interpreting Language Models at Scale by ""}, {'type': 'mention', 'user': 'javifer', 'raw': '@javifer'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'lena-voita', 'raw': '@lena-voita'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This work presents a novel method to identify salient components in Transformer-based language models by decomposing the contribution of various model components into the residual stream.', 'raw': 'This work presents a novel method to identify salient components in Transformer-based language models by decomposing the contribution of various model components into the residual stream.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This method is more efficient and scalable than previous 
techniques such as activation patching, as it only requires a single forward pass through the model to identify critical information flow paths. Moreover, it can be applied without a contrastive template, which is observed to produce results dependent on the selected contrastive example for activation patching.', 'raw': 'This method is more efficient and scalable than previous techniques such as activation patching, as it only requires a single forward pass through the model to identify critical information flow paths. Moreover, it can be applied without a contrastive template, which is observed to produce results dependent on the selected contrastive example for activation patching.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Information flow routes are applied to Llama 2, showing that:', 'raw': 'Information flow routes are applied to Llama 2, showing that:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Models show “typical” information flow routes for non-content words, while content words don’t exhibit such patterns.', 'raw': '1. Models show “typical” information flow routes for non-content words, while content words don’t exhibit such patterns.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Feedforward networks are more active in the bottom layers of the network (where e.g. subject enrichment is performed) and in the very last layer.', 'raw': '2. Feedforward networks are more active in the bottom layers of the network (where e.g. subject enrichment is performed) and in the very last layer.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Positional and subword-merging attention heads are among the most active and important throughout the network.', 'raw': '3. Positional and subword-merging attention heads are among the most active and important throughout the network.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Periods can be treated by the model as BOS tokens by leaving their residual representation mostly untouched during the forward pass.', 'raw': '4. Periods can be treated by the model as BOS tokens by leaving their residual representation mostly untouched during the forward pass.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Finally, the paper also demonstrates that some model components are specialized for specific domains, such as coding or multilingual texts, suggesting a high degree of modularity in the network. The contribution of domain-specific heads obtained by projecting right singular values of the OV circuit to the unembedding matrix shows highly interpretable concepts being handled in granular model components.', 'raw': 'Finally, the paper also demonstrates that some model components are specialized for specific domains, such as coding or multilingual texts, suggesting a high degree of modularity in the network. 
The contribution of domain-specific heads obtained by projecting right singular values of the OV circuit to the unembedding matrix shows highly interpretable concepts being handled in granular model components.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.00824'}, 'url': 'https://huggingface.co/papers/2403.00824', 'raw': 'https://huggingface.co/papers/2403.00824', 'label': 'Information Flow Routes: Automatically Interpreting Language Models at\n Scale (2403.00824)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 All daily picks: ', 'raw': '🔍 All daily picks: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9', 'raw': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Information Flow Routes: Automatically Interpreting Language Models at Scale by @javifer @lena-voita + +This work presents a novel method to identify salient components in Transformer-based language models by decomposing the contribution of various model components into the residual stream. + +This method is more efficient and scalable than previous techniques such as activation patching, as it only requires a single forward pass through the model to identify critical information flow paths. Moreover, it can be applied without a contrastive template, which is observed to produce results dependent on the selected contrastive example for activation patching. + +Information flow routes are applied to Llama 2, showing that: + +1. Models show “typical” information flow routes for non-content words, while content words don’t exhibit such patterns. +2. Feedforward networks are more active in the bottom layers of the network (where e.g. subject enrichment is performed) and in the very last layer. +3. Positional and subword-merging attention heads are among the most active and important throughout the network. +4. Periods can be treated by the model as BOS tokens by leaving their residual representation mostly untouched during the forward pass. + +Finally, the paper also demonstrates that some model components are specialized for specific domains, such as coding or multilingual texts, suggesting a high degree of modularity in the network. The contribution of domain-specific heads obtained by projecting right singular values of the OV circuit to the unembedding matrix shows highly interpretable concepts being handled in granular model components. 
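The single-forward-pass idea behind the information flow routes post above can be illustrated with plain PyTorch forward hooks: measure how strongly each attention and MLP block writes into the residual stream. This is a toy approximation of the general idea, not the paper's exact importance metric:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2").eval()

norms = {}  # component name -> mean magnitude of its residual-stream update

def make_hook(name):
    def hook(module, inputs, output):
        out = output[0] if isinstance(output, tuple) else output
        norms[name] = out.norm(dim=-1).mean().item()
    return hook

# Attention and MLP blocks both write additively into the residual stream,
# so the norm of their outputs is a crude proxy for their contribution.
for i, block in enumerate(model.transformer.h):
    block.attn.register_forward_hook(make_hook(f"attn_{i}"))
    block.mlp.register_forward_hook(make_hook(f"mlp_{i}"))

with torch.no_grad():
    model(**tok("Periods can act like BOS tokens.", return_tensors="pt"))

print(sorted(norms.items(), key=lambda kv: -kv[1])[:5])
```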
+ +📄 Paper: https://huggingface.co/papers/2403.00824 + +🔍 All daily picks: https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/5Kmgr3KDK4Zqf4PuzfJmb.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/DOvQlqgcC-WZZ_BNwwh3C.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/-ck9Fnodt4mYZp0_-DcW6.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/lXyBbuRqHnQU9ZLdtbrPr.png'}]","[{'_id': '61a5051ecd3eb3043f38e443', 'avatarUrl': '/avatars/bcc94a31fab7486ca9d018245a289fb0.svg', 'fullname': 'Javier Ferrando', 'name': 'javifer', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6}, {'_id': '65018065c56ace4fdc0014b4', 'avatarUrl': '/avatars/d5e02ca2e88bb9b17f92703927a34df4.svg', 'fullname': 'Elena Voita', 'name': 'lena-voita', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '❤️', 'users': ['Kukedlc', 'samusenps', 'javifer', 'CristianJD', 'Theli'], 'count': 5}, {'reaction': '🔥', 'users': ['eramax', 'mmhamdy'], 'count': 2}]",2024-03-11 16:40:50,2024-03-11 19:15:46.515,[],/posts/gsarti/263130686669707,176,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/657217faabb25ed8aedd5e48/UUHAXeGtOnQBXFD3nYtf2.jpeg,117.0,Vlad Bogolin,vladbogo,915464780192455,"[{'type': 'text', 'value': 'A recent paper titled ""ShortGPT: Layers in Large Language Models are More Redundant Than You Expect"" proposes a simple and effective approach to pruning Large Language Models (LLMs) by removing redundant layers. ', 'raw': 'A recent paper titled ""ShortGPT: Layers in Large Language Models are More Redundant Than You Expect"" proposes a simple and effective approach to pruning Large Language Models (LLMs) by removing redundant layers. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key points:', 'raw': 'Key points:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Discovers significant redundancy across layers in LLMs, with some layers playing a negligible role for the final performance.', 'raw': '* Discovers significant redundancy across layers in LLMs, with some layers playing a negligible role for the final performance.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Defines a new metric called Block Influence (BI) to quantify the importance of each layer in an LLM.', 'raw': '* Defines a new metric called Block Influence (BI) to quantify the importance of each layer in an LLM.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""* Removes layers with low BI scores, achieving up to 25% reduction in parameters and computation while maintaining 92% of the LLM's performance."", 'raw': ""* Removes layers with low BI scores, achieving up to 25% reduction in parameters and computation while maintaining 92% of the LLM's performance.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Congrats to the authors for their work!', 'raw': 'Congrats to the authors for their work!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.03853'}, 'url': 'https://huggingface.co/papers/2403.03853', 'raw': 'https://huggingface.co/papers/2403.03853', 'label': 'ShortGPT: Layers in Large Language Models are More Redundant Than You\n Expect (2403.03853)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","A recent paper titled ""ShortGPT: Layers in Large Language Models are More Redundant Than You Expect"" proposes a simple and effective approach to pruning Large Language Models (LLMs) by removing redundant layers. + +Key points: +* Discovers significant redundancy across layers in LLMs, with some layers playing a negligible role for the final performance. +* Defines a new metric called Block Influence (BI) to quantify the importance of each layer in an LLM. +* Removes layers with low BI scores, achieving up to 25% reduction in parameters and computation while maintaining 92% of the LLM's performance. + +Congrats to the authors for their work! 
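On the Block Influence (BI) metric mentioned in the ShortGPT key points above: the paper defines it, roughly, as one minus the average cosine similarity between a layer's input and output hidden states, so layers that barely change the residual stream score near zero and are candidates for removal. A minimal sketch of that computation; the paper's exact normalization may differ:

```python
import torch
import torch.nn.functional as F

def block_influence(hidden_in: torch.Tensor, hidden_out: torch.Tensor) -> float:
    # hidden_in / hidden_out: (batch, seq, dim) states before/after one layer.
    cos = F.cosine_similarity(hidden_in, hidden_out, dim=-1)
    return (1.0 - cos).mean().item()  # near 0 => layer is largely redundant

# With transformers, per-layer states come from a single forward pass:
# out = model(**inputs, output_hidden_states=True)
# bi = [block_influence(h_in, h_out)
#       for h_in, h_out in zip(out.hidden_states[:-1], out.hidden_states[1:])]
```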
+ +Paper: https://huggingface.co/papers/2403.03853 + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/OgMvLzFc5c89hQ8mHCVaP.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/ONUWKtYl-6OYCwXO8SQ3Q.png'}]",[],"[{'reaction': '👍', 'users': ['Dlbk', 'emran86', 'dev7halo', 'MexIvanov', 'Pretam'], 'count': 5}, {'reaction': '❤️', 'users': ['emran86'], 'count': 1}]",2024-03-11 15:01:48,2024-03-11 15:01:48.342,[],/posts/vladbogo/915464780192455,111,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,349712597113481,"[{'type': 'text', 'value': 'ELLA', 'raw': 'ELLA'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Equip Diffusion Models with LLM for Enhanced Semantic Alignment', 'raw': 'Equip Diffusion Models with LLM for Enhanced Semantic Alignment'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.05135'}, 'url': 'https://huggingface.co/papers/2403.05135', 'raw': 'https://huggingface.co/papers/2403.05135', 'label': 'ELLA: Equip Diffusion Models with LLM for Enhanced Semantic Alignment (2403.05135)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Diffusion models have demonstrated remarkable performance in the domain of text-to-image generation. However, most widely used models still employ CLIP as their text encoder, which constrains their ability to comprehend dense prompts, encompassing multiple objects, detailed attributes, complex relationships, long-text alignment, etc. In this paper, we introduce an Efficient Large Language Model Adapter, termed ELLA, which equips text-to-image diffusion models with powerful Large Language Models (LLM) to enhance text alignment without training of either U-Net or LLM. To seamlessly bridge two pre-trained models, we investigate a range of semantic alignment connector designs and propose a novel module, the Timestep-Aware Semantic Connector (TSC), which dynamically extracts timestep-dependent conditions from LLM. Our approach adapts semantic features at different stages of the denoising process, assisting diffusion models in interpreting lengthy and intricate prompts over sampling timesteps. Additionally, ELLA can be readily incorporated with community models and tools to improve their prompt-following capabilities. To assess text-to-image models in dense prompt following, we introduce Dense Prompt Graph Benchmark (DPG-Bench), a challenging benchmark consisting of 1K dense prompts. Extensive experiments demonstrate the superiority of ELLA in dense prompt following compared to state-of-the-art methods, particularly in multiple object compositions involving diverse attributes and relationships.', 'raw': 'Diffusion models have demonstrated remarkable performance in the domain of text-to-image generation. However, most widely used models still employ CLIP as their text encoder, which constrains their ability to comprehend dense prompts, encompassing multiple objects, detailed attributes, complex relationships, long-text alignment, etc. 
In this paper, we introduce an Efficient Large Language Model Adapter, termed ELLA, which equips text-to-image diffusion models with powerful Large Language Models (LLM) to enhance text alignment without training of either U-Net or LLM. To seamlessly bridge two pre-trained models, we investigate a range of semantic alignment connector designs and propose a novel module, the Timestep-Aware Semantic Connector (TSC), which dynamically extracts timestep-dependent conditions from LLM. Our approach adapts semantic features at different stages of the denoising process, assisting diffusion models in interpreting lengthy and intricate prompts over sampling timesteps. Additionally, ELLA can be readily incorporated with community models and tools to improve their prompt-following capabilities. To assess text-to-image models in dense prompt following, we introduce Dense Prompt Graph Benchmark (DPG-Bench), a challenging benchmark consisting of 1K dense prompts. Extensive experiments demonstrate the superiority of ELLA in dense prompt following compared to state-of-the-art methods, particularly in multiple object compositions involving diverse attributes and relationships.'}, {'type': 'new_line', 'raw': '\n'}]","ELLA + +Equip Diffusion Models with LLM for Enhanced Semantic Alignment + +https://huggingface.co/papers/2403.05135 + +Diffusion models have demonstrated remarkable performance in the domain of text-to-image generation. However, most widely used models still employ CLIP as their text encoder, which constrains their ability to comprehend dense prompts, encompassing multiple objects, detailed attributes, complex relationships, long-text alignment, etc. In this paper, we introduce an Efficient Large Language Model Adapter, termed ELLA, which equips text-to-image diffusion models with powerful Large Language Models (LLM) to enhance text alignment without training of either U-Net or LLM. To seamlessly bridge two pre-trained models, we investigate a range of semantic alignment connector designs and propose a novel module, the Timestep-Aware Semantic Connector (TSC), which dynamically extracts timestep-dependent conditions from LLM. Our approach adapts semantic features at different stages of the denoising process, assisting diffusion models in interpreting lengthy and intricate prompts over sampling timesteps. Additionally, ELLA can be readily incorporated with community models and tools to improve their prompt-following capabilities. To assess text-to-image models in dense prompt following, we introduce Dense Prompt Graph Benchmark (DPG-Bench), a challenging benchmark consisting of 1K dense prompts. Extensive experiments demonstrate the superiority of ELLA in dense prompt following compared to state-of-the-art methods, particularly in multiple object compositions involving diverse attributes and relationships. 
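To make the Timestep-Aware Semantic Connector (TSC) from the ELLA abstract above concrete: the idea is a lightweight adapter that pools LLM token features into a fixed set of condition tokens, with the pooling modulated by the diffusion timestep. The module below is one plausible reading of that description, not ELLA's released implementation; all dimensions are made-up defaults:

```python
import torch
import torch.nn as nn

class TimestepAwareConnector(nn.Module):
    # Toy TSC: timestep-modulated cross-attention over frozen LLM features.
    def __init__(self, llm_dim=4096, cond_dim=768, n_queries=64, t_dim=256):
        super().__init__()
        self.queries = nn.Parameter(torch.randn(n_queries, cond_dim) * 0.02)
        self.t_proj = nn.Linear(t_dim, 2 * cond_dim)  # scale/shift per timestep
        self.kv_proj = nn.Linear(llm_dim, cond_dim)
        self.attn = nn.MultiheadAttention(cond_dim, num_heads=8, batch_first=True)

    def forward(self, llm_feats, t_emb):
        # llm_feats: (B, seq, llm_dim) from the text LLM; t_emb: (B, t_dim)
        scale, shift = self.t_proj(t_emb).chunk(2, dim=-1)
        q = self.queries.unsqueeze(0) * (1 + scale[:, None]) + shift[:, None]
        kv = self.kv_proj(llm_feats)
        cond, _ = self.attn(q, kv, kv)  # (B, n_queries, cond_dim)
        return cond  # fed to the U-Net's cross-attention in place of CLIP tokens
```

The timestep modulation of the queries is what makes the extracted conditions change over the denoising trajectory, which is the property the abstract emphasizes.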
+
","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/AO09kvhH-AfnF_xCUG-CX.png'}]",[],"[{'reaction': '👍', 'users': ['vladbogo', 'Dlbk', 'samusenps', 'ClayFace', 'Zmu', 'melohux', 'Benson'], 'count': 7}, {'reaction': '❤️', 'users': ['melohux', 'bmorphism'], 'count': 2}]",2024-03-11 14:07:56,2024-03-11 14:07:56.218,[],/posts/akhaliq/349712597113481,186,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/5fef4eb7770b06e11c2c6381/1NMdigjCGtn0yvQZSi5NJ.png,64.0,Alessandro Ercolani,giux78,835823846472163,"[{'type': 'text', 'value': 'Super work from ', 'raw': 'Super work from '}, {'type': 'mention', 'user': 'DeepMount00', 'raw': '@DeepMount00'}, {'type': 'text', 'value': ':', 'raw': ':'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 𝐃𝐢𝐬𝐜𝐨𝐯𝐞𝐫 𝐔𝐧𝐢𝐯𝐞𝐫𝐬𝐚𝐥 𝐍𝐞𝐫: 𝐀 𝐆𝐥𝐢𝐍𝐞𝐫-𝐁𝐚𝐬𝐞𝐝 𝐈𝐭𝐚𝐥𝐢𝐚𝐧 𝐍𝐄𝐑', 'raw': '🚀 𝐃𝐢𝐬𝐜𝐨𝐯𝐞𝐫 𝐔𝐧𝐢𝐯𝐞𝐫𝐬𝐚𝐥 𝐍𝐞𝐫: 𝐀 𝐆𝐥𝐢𝐍𝐞𝐫-𝐁𝐚𝐬𝐞𝐝 𝐈𝐭𝐚𝐥𝐢𝐚𝐧 𝐍𝐄𝐑'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Introducing 𝐔𝐧𝐢𝐯𝐞𝐫𝐬𝐚𝐥 𝐍𝐞𝐫 𝐟𝐨𝐫 𝐈𝐭𝐚𝐥𝐢𝐚𝐧 𝐋𝐚𝐧𝐠𝐮𝐚𝐠𝐞, a revolutionary Named Entity Recognition (NER) model evolved from the GliNer architecture and meticulously tailored for the Italian language. This advanced model is a beacon of efficiency and versatility, engineered to 𝐫𝐞𝐜𝐨𝐠𝐧𝐢𝐳𝐞 𝐚𝐧𝐲 𝐞𝐧𝐭𝐢𝐭𝐲 𝐭𝐲𝐩𝐞 within the rich nuances of Italian, using a bidirectional transformer encoder. It stands out as an ideal solution for those navigating the challenges of resource-limited environments or seeking an efficient alternative to the cumbersome Large Language Models (LLMs).', 'raw': 'Introducing 𝐔𝐧𝐢𝐯𝐞𝐫𝐬𝐚𝐥 𝐍𝐞𝐫 𝐟𝐨𝐫 𝐈𝐭𝐚𝐥𝐢𝐚𝐧 𝐋𝐚𝐧𝐠𝐮𝐚𝐠𝐞, a revolutionary Named Entity Recognition (NER) model evolved from the GliNer architecture and meticulously tailored for the Italian language. This advanced model is a beacon of efficiency and versatility, engineered to 𝐫𝐞𝐜𝐨𝐠𝐧𝐢𝐳𝐞 𝐚𝐧𝐲 𝐞𝐧𝐭𝐢𝐭𝐲 𝐭𝐲𝐩𝐞 within the rich nuances of Italian, using a bidirectional transformer encoder. It stands out as an ideal solution for those navigating the challenges of resource-limited environments or seeking an efficient alternative to the cumbersome Large Language Models (LLMs).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '𝐑𝐮𝐧𝐬 𝐟𝐚𝐬𝐭 𝐚𝐥𝐬𝐨 𝐨𝐧 𝐂𝐏𝐔!', 'raw': '𝐑𝐮𝐧𝐬 𝐟𝐚𝐬𝐭 𝐚𝐥𝐬𝐨 𝐨𝐧 𝐂𝐏𝐔!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Experience this Italian-focused innovation live on Hugging Face Spaces:', 'raw': 'Experience this Italian-focused innovation live on Hugging Face Spaces:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DeepMount00/universal_ner_ita'}, 'url': 'https://huggingface.co/spaces/DeepMount00/universal_ner_ita', 'raw': 'https://huggingface.co/spaces/DeepMount00/universal_ner_ita'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2311.08526', 'raw': 'https://arxiv.org/abs/2311.08526'}, {'type': 'text', 'value': ' Urchade Zaratiana et al. Great work!', 'raw': ' Urchade Zaratiana et al. Great work!'}]","Super work from @DeepMount00: + +🚀 𝐃𝐢𝐬𝐜𝐨𝐯𝐞𝐫 𝐔𝐧𝐢𝐯𝐞𝐫𝐬𝐚𝐥 𝐍𝐞𝐫: 𝐀 𝐆𝐥𝐢𝐍𝐞𝐫-𝐁𝐚𝐬𝐞𝐝 𝐈𝐭𝐚𝐥𝐢𝐚𝐧 𝐍𝐄𝐑 + +Introducing 𝐔𝐧𝐢𝐯𝐞𝐫𝐬𝐚𝐥 𝐍𝐞𝐫 𝐟𝐨𝐫 𝐈𝐭𝐚𝐥𝐢𝐚𝐧 𝐋𝐚𝐧𝐠𝐮𝐚𝐠𝐞, a revolutionary Named Entity Recognition (NER) model evolved from the GliNer architecture and meticulously tailored for the Italian language. 
This advanced model is a beacon of efficiency and versatility, engineered to 𝐫𝐞𝐜𝐨𝐠𝐧𝐢𝐳𝐞 𝐚𝐧𝐲 𝐞𝐧𝐭𝐢𝐭𝐲 𝐭𝐲𝐩𝐞 within the rich nuances of Italian, using a bidirectional transformer encoder. It stands out as an ideal solution for those navigating the challenges of resource-limited environments or seeking an efficient alternative to the cumbersome Large Language Models (LLMs). +𝐑𝐮𝐧𝐬 𝐟𝐚𝐬𝐭 𝐚𝐥𝐬𝐨 𝐨𝐧 𝐂𝐏𝐔! + +Experience this Italian-focused innovation live on Hugging Face Spaces: +https://huggingface.co/spaces/DeepMount00/universal_ner_ita + +Paper: https://arxiv.org/abs/2311.08526 Urchade Zaratiana et al. Great work!",[],"[{'_id': '64f1bf6a8b550e875926a590', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64f1bf6a8b550e875926a590/xdZHPQGdI2jISWcKhWTMQ.png', 'fullname': 'Michele Montebovi', 'name': 'DeepMount00', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 176}]","[{'reaction': '❤️', 'users': ['samusenps', 'osanseviero', 'Dlbk', 'DeepMount00', 'urchade', 'giux78', 'tomaarsen', 'MoritzLaurer'], 'count': 8}]",2024-03-11 13:22:40,2024-03-14 10:10:31.401,"[{'_id': '64f1bf6a8b550e875926a590', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64f1bf6a8b550e875926a590/xdZHPQGdI2jISWcKhWTMQ.png', 'fullname': 'Michele Montebovi', 'name': 'DeepMount00', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 176, 'isFollowing': False}, {'_id': '6317233cc92fd6fee317e030', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png', 'fullname': 'Tom Aarsen', 'name': 'tomaarsen', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2266, 'isFollowing': False}, {'_id': '5fef4eb7770b06e11c2c6381', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5fef4eb7770b06e11c2c6381/1NMdigjCGtn0yvQZSi5NJ.png', 'fullname': 'Alessandro Ercolani', 'name': 'giux78', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 64, 'isFollowing': False}]",/posts/giux78/835823846472163,121,,3
https://cdn-avatars.huggingface.co/v1/production/uploads/63893d4c184615e463aa24b8/S1flsX_26OF6ZJBVcPlaf.jpeg,65.0,Matt Valoatto,mvaloatto,533611740937860,"[{'type': 'text', 'value': 'Good time to drop some 🤗 in the comments > ', 'raw': 'Good time to drop some 🤗 in the comments > '}, {'type': 'link', 'href': 'https://x.com/elonmusk/status/1767108624038449405', 'raw': 'https://x.com/elonmusk/status/1767108624038449405'}]",Good time to drop some 🤗 in the comments > https://x.com/elonmusk/status/1767108624038449405,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/63893d4c184615e463aa24b8/dJaDhKsIrSNQfGDj_UNpV.png'}]",[],"[{'reaction': '👍', 'users': ['samusenps', 'Dlbk', 'sa8', 'frntn', 'clefourrier', 'Zmu', 'CKeibel', 'lvalue', 'clem'], 'count': 9}]",2024-03-11 12:27:59,2024-03-12 04:19:22.813,"[{'_id': '6538119803519fddb4a17e10', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}, {'_id': '63893d4c184615e463aa24b8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63893d4c184615e463aa24b8/S1flsX_26OF6ZJBVcPlaf.jpeg', 'fullname':
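The GLiNER-based Italian model in the giux78 post above is easiest to try through the small `gliner` library. The checkpoint id below is assumed from the Space's name and may differ from the actual model repo:

```python
from gliner import GLiNER

# Checkpoint id assumed from the Space name; swap in the real repo if needed.
model = GLiNER.from_pretrained("DeepMount00/universal_ner_ita")

text = "Giuseppe Verdi nacque a Busseto nel 1813."
labels = ["persona", "luogo", "data"]  # arbitrary entity types, in Italian

for ent in model.predict_entities(text, labels, threshold=0.5):
    print(ent["text"], "->", ent["label"])
```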
'Matt Valoatto', 'name': 'mvaloatto', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}, {'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}]",/posts/mvaloatto/533611740937860,391,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/YeFyz1AZVcCRsyNHHtwJG.jpeg,210.0,Sebastian Gabarain,Locutusque,123362113265640,"[{'type': 'text', 'value': '🚀 Introducing UltraTextbooks v2: The Ultimate Educational NLP Dataset! 📚', 'raw': '🚀 Introducing UltraTextbooks v2: The Ultimate Educational NLP Dataset! 📚'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've expanded the dataset to include an even wider range of high-quality textbooks, with a special focus on machine learning, mathematics, and coding. 💻🧮"", 'raw': ""I've expanded the dataset to include an even wider range of high-quality textbooks, with a special focus on machine learning, mathematics, and coding. 💻🧮""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'With over 3 million examples and 6 GB of data, UltraTextbooks v2 is your go-to resource for training advanced language models and developing cutting-edge educational applications. 🎓', 'raw': 'With over 3 million examples and 6 GB of data, UltraTextbooks v2 is your go-to resource for training advanced language models and developing cutting-edge educational applications. 🎓'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Explore the dataset on Hugging Face and unlock the power of AI in education! 🔓', 'raw': 'Explore the dataset on Hugging Face and unlock the power of AI in education! 🔓'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Locutusque/UltraTextbooks-2.0'}, 'url': 'https://huggingface.co/datasets/Locutusque/UltraTextbooks-2.0', 'raw': 'https://huggingface.co/datasets/Locutusque/UltraTextbooks-2.0'}]","🚀 Introducing UltraTextbooks v2: The Ultimate Educational NLP Dataset! 📚 + +I've expanded the dataset to include an even wider range of high-quality textbooks, with a special focus on machine learning, mathematics, and coding. 💻🧮 + +With over 3 million examples and 6 GB of data, UltraTextbooks v2 is your go-to resource for training advanced language models and developing cutting-edge educational applications. 🎓 + +Explore the dataset on Hugging Face and unlock the power of AI in education! 🔓 + +https://huggingface.co/datasets/Locutusque/UltraTextbooks-2.0",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'Felladrin', 'clem', 'samusenps', 'Jjjjjo'], 'count': 5}]",2024-03-07 04:18:53,2024-03-07 04:19:17.070,[],/posts/Locutusque/123362113265640,332,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/647280de0c2b5fdaf1f49b87/KosE59r9VBXocSnoQEcxw.jpeg,3.0,Michael Shenoda,mshenoda,337420060295579,"[{'type': 'text', 'value': ""Empower yourself with knowledge and skills. The power of knowledge is the key for advancement. Realize that without discovering your weaknesses, it would be hard to improve yourself. 
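Loading the UltraTextbooks v2 dataset announced in the Locutusque post above is a one-liner with 🤗 Datasets; the "train" split name is assumed, and streaming avoids downloading all ~6 GB up front:

```python
from datasets import load_dataset

# Stream to avoid materializing the full ~6 GB corpus locally.
ds = load_dataset("Locutusque/UltraTextbooks-2.0", split="train", streaming=True)
print(next(iter(ds)))  # inspect one record to see the schema
```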
No one is perfect, no one knows everything, but we all have the freedom to learn and discover our true potential. It's important to surround yourself with positivity and people who help you push forward to improve. Lifelong learning is a crucial key to success."", 'raw': ""Empower yourself with knowledge and skills. The power of knowledge is the key for advancement. Realize that without discovering your weaknesses, it would be hard to improve yourself. No one is perfect, no one knows everything, but we all have the freedom to learn and discover our true potential. It's important to surround yourself with positivity and people who help you push forward to improve. Lifelong learning is a crucial key to success.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '#LifelongLearning #Success #Enthusiasm ', 'raw': '#LifelongLearning #Success #Enthusiasm '}]","Empower yourself with knowledge and skills. The power of knowledge is the key for advancement. Realize that without discovering your weaknesses, it would be hard to improve yourself. No one is perfect, no one knows everything, but we all have the freedom to learn and discover our true potential. It's important to surround yourself with positivity and people who help you push forward to improve. Lifelong learning is a crucial key to success. + +#LifelongLearning #Success #Enthusiasm ",[],[],"[{'reaction': '❤️', 'users': ['samusenps', 'osanseviero'], 'count': 2}, {'reaction': '👍', 'users': ['samusenps'], 'count': 1}]",2024-03-06 22:13:28,2024-03-07 03:34:02.078,"[{'_id': '6538119803519fddb4a17e10', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}]",/posts/mshenoda/337420060295579,386,,1
https://cdn-avatars.huggingface.co/v1/production/uploads/63081e15a670ed10f9d44229/w1b9uq-9774bMMgJbSPsS.jpeg,245.0,Yuntian Deng,yuntian-deng,712785447418194,"[{'type': 'text', 'value': 'Hi everyone,', 'raw': 'Hi everyone,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'd like to share our free GPT-4 chatbot: "", 'raw': ""I'd like to share our free GPT-4 chatbot: ""}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'yuntian-deng/ChatGPT4'}, 'url': 'https://huggingface.co/spaces/yuntian-deng/ChatGPT4', 'raw': 'https://huggingface.co/spaces/yuntian-deng/ChatGPT4'}, {'type': 'text', 'value': '. Data collected from it will be shared back with the community in future releases of the WildChat dataset: ', 'raw': '. Data collected from it will be shared back with the community in future releases of the WildChat dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'allenai/WildChat'}, 'url': 'https://huggingface.co/datasets/allenai/WildChat', 'raw': 'https://huggingface.co/datasets/allenai/WildChat'}, {'type': 'text', 'value': '. Please help us reach 1 million conversations!', 'raw': '. 
Please help us reach 1 million conversations!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks,', 'raw': 'Thanks,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Yuntian', 'raw': 'Yuntian'}]","Hi everyone, + +I'd like to share our free GPT-4 chatbot: https://huggingface.co/spaces/yuntian-deng/ChatGPT4. Data collected from it will be shared back with the community in future releases of the WildChat dataset: https://huggingface.co/datasets/allenai/WildChat. Please help us reach 1 million conversations! + +Thanks, +Yuntian",[],[],"[{'reaction': '🤗', 'users': ['yuntian-deng', 'iDrops', 'jinghan23', 'ajibawa-2023', 'osanseviero', 'samusenps', 'Dlbk', 'Tonic', 'Lewdiculous', 'psy-taha', 'artghoul', 'RobertRoss', 'asp933', 'IdleIdiot', 'SwedMlite', 'Maria200035', 'LinJuan', 'WollyCAT'], 'count': 18}, {'reaction': '❤️', 'users': ['artghoul', 'Aerialyn', 'lololololololokok', 'caiquemoa', 'Johanbb', 'dekuartwire', 'Maria200035'], 'count': 7}, {'reaction': '🔥', 'users': ['psy-taha', 'artghoul', 'hasi243', 'jhojhocraazy'], 'count': 4}]",2024-03-06 22:10:56,2025-06-17 07:56:11.623,"[{'_id': '66f7ab733c7ffa793194b70a', 'avatarUrl': '/avatars/8670c8b42b82685ceccac483a760edab.svg', 'fullname': 'Hellyc Huang', 'name': 'Hellyc01', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}, {'_id': '65eb03dc1200e549a3e6ea91', 'avatarUrl': '/avatars/2df385a1a554f2ce0da90ac9f2427d44.svg', 'fullname': 'nhan nguyen', 'name': 'nhannguyen26', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '64f3eb890cab3be011370b3f', 'avatarUrl': '/avatars/ebe9d1e21e3e56178e5db5acbb54d1ae.svg', 'fullname': 'Ockert Slabbert', 'name': 'Ocks', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '63e4c62904b507b491f12f0b', 'avatarUrl': '/avatars/c75047b7649e851e1bf6411794881b3b.svg', 'fullname': 'Johh West', 'name': 'Smokez17', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '670683eda99750f7d37f51a7', 'avatarUrl': '/avatars/5605617a444ec872323f4d6e1f407733.svg', 'fullname': 'stas', 'name': 'abakanai', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}, {'_id': '671d7cd5be150c12ed1cc5dd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/rRt2vqFG7eCO25DeQLtq7.png', 'fullname': 'Maria da Ascensão de Almeida Saraiva', 'name': '13-10', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6659b674377e197825eef323', 'avatarUrl': '/avatars/955d64926b43e2cb5fc74873e66e3a81.svg', 'fullname': 'Мария', 'name': 'Maria200035', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '680c41e52e2f41606dbe63b5', 'avatarUrl': '/avatars/ee33199a7502f45d529aef0e8925c492.svg', 'fullname': 'man', 'name': 'anup1100', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '65aaf97957f263e3d068ff5e', 'avatarUrl': '/avatars/7a3af97743e1d6eba80fad61a8e0e459.svg', 'fullname': 'Ss', 'name': 'Ss222666', 'type': 'user', 'isPro': False, 'isHf': 
False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/yuntian-deng/712785447418194,18487,,12 +https://cdn-avatars.huggingface.co/v1/production/uploads/657217faabb25ed8aedd5e48/UUHAXeGtOnQBXFD3nYtf2.jpeg,117.0,Vlad Bogolin,vladbogo,286795551282908,"[{'type': 'text', 'value': 'The ""Design2Code: How Far Are We From Automating Front-End Engineering"" paper presents a benchmark for multimodal large language models (LLMs) aimed at automating front-end web development by translating webpage designs (screenshots) into code. This task evaluates the models\' ability to recreate webpages that are visually and structurally similar to the original designs.', 'raw': 'The ""Design2Code: How Far Are We From Automating Front-End Engineering"" paper presents a benchmark for multimodal large language models (LLMs) aimed at automating front-end web development by translating webpage designs (screenshots) into code. This task evaluates the models\' ability to recreate webpages that are visually and structurally similar to the original designs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key Points:', 'raw': 'Key Points:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Introduces the Design2Code task and benchmark for converting webpage screenshots into code, aiming to automate front-end web development.', 'raw': '* Introduces the Design2Code task and benchmark for converting webpage screenshots into code, aiming to automate front-end web development.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* Evaluates multimodal LLMs using comprehensive metrics for visual similarity and element matching.', 'raw': '* Evaluates multimodal LLMs using comprehensive metrics for visual similarity and element matching.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* GPT-4V outperforms other models in terms of visual resemblance and content accuracy, with generated webpages often preferred over the original references.', 'raw': '* GPT-4V outperforms other models in terms of visual resemblance and content accuracy, with generated webpages often preferred over the original references.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2403.03163'}, 'url': 'https://huggingface.co/papers/2403.03163', 'raw': 'https://huggingface.co/papers/2403.03163', 'label': 'Design2Code: How Far Are We From Automating Front-End Engineering? 
(2403.03163)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Project page: ', 'raw': 'Project page: '}, {'type': 'link', 'href': 'https://salt-nlp.github.io/Design2Code/', 'raw': 'https://salt-nlp.github.io/Design2Code/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset: ', 'raw': 'Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'SALT-NLP/Design2Code'}, 'url': 'https://huggingface.co/datasets/SALT-NLP/Design2Code', 'raw': 'https://huggingface.co/datasets/SALT-NLP/Design2Code'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Congrats to the authors for their work!', 'raw': 'Congrats to the authors for their work!'}]","The ""Design2Code: How Far Are We From Automating Front-End Engineering"" paper presents a benchmark for multimodal large language models (LLMs) aimed at automating front-end web development by translating webpage designs (screenshots) into code. This task evaluates the models' ability to recreate webpages that are visually and structurally similar to the original designs. + +Key Points: +* Introduces the Design2Code task and benchmark for converting webpage screenshots into code, aiming to automate front-end web development. +* Evaluates multimodal LLMs using comprehensive metrics for visual similarity and element matching. +* GPT-4V outperforms other models in terms of visual resemblance and content accuracy, with generated webpages often preferred over the original references. + +Paper: https://huggingface.co/papers/2403.03163 +Project page: https://salt-nlp.github.io/Design2Code/ +Dataset: https://huggingface.co/datasets/SALT-NLP/Design2Code + +Congrats to the authors for their work!",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'samusenps'], 'count': 2}, {'reaction': '👍', 'users': ['samusenps'], 'count': 1}]",2024-03-06 22:10:01,2024-03-06 22:11:54.727,[],/posts/vladbogo/286795551282908,43,,0 +/avatars/d4947439475dc81f2c9e9304382b6257.svg,1.0,Qun Gao,qgao007,704777926081097,"[{'type': 'text', 'value': 'Hello world! ', 'raw': 'Hello world! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Hello world! + +",[],[],"[{'reaction': '🤗', 'users': ['samusenps', 'QizhiPei'], 'count': 2}]",2024-03-06 22:07:41,2024-03-08 11:13:16.308,"[{'_id': '63c7179750cc81901daaadbc', 'avatarUrl': '/avatars/9fe1518c4b2d12e36733650bb0c87932.svg', 'fullname': 'Flouga Droi', 'name': 'flofloga', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/qgao007/704777926081097,54,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/626a9b5205fe1cb65720e00e/hyWcWn_8jVZsu1Yc5Z0R8.png,4.0,Bill Psomas,billpsomas,515989665788135,"[{'type': 'text', 'value': ""Wow! Hello world. How's ECCV submissions going?"", 'raw': ""Wow! Hello world. How's ECCV submissions going?""}]",Wow! Hello world. How's ECCV submissions going?,[],[],"[{'reaction': '🤗', 'users': ['osanseviero'], 'count': 1}]",2024-03-06 21:37:59,2024-03-06 21:37:59.215,[],/posts/billpsomas/515989665788135,56,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1664378156457-noauth.png,15.0,Marko Vidrih,MarkoVidrih,369453933037543,"[{'type': 'text', 'value': 'Hi everyone! ', 'raw': 'Hi everyone! 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Looking forward to engaging with you all 🤗', 'raw': 'Looking forward to engaging with you all 🤗'}]","Hi everyone! + +Looking forward to engaging with you all 🤗",[],[],"[{'reaction': '👍', 'users': ['Rexhaif', 'Puyush', 'Gigahardglob', 'BoeChase'], 'count': 4}]",2024-03-06 21:32:06,2025-01-16 04:35:40.546,"[{'_id': '64a84a1f6324705e6a1ad5bd', 'avatarUrl': '/avatars/b238e7bb6d0dca0c8b9a5b7142d742d8.svg', 'fullname': 'Puyush Gupta', 'name': 'Puyush', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4, 'isFollowing': False}, {'_id': '6334653886c3fdcdc7ab8f7e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1664378156457-noauth.png', 'fullname': 'Marko Vidrih', 'name': 'MarkoVidrih', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 15, 'isFollowing': False}, {'_id': '67888b689d9f42a8c945871d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/no-auth/T1ypORrB_dCfNXrnEAy0-.png', 'fullname': 'Chase Boe', 'name': 'BoeChase', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/MarkoVidrih/369453933037543,924,,7 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e4318d616b09a31220980d6/24rMJ_vPh3gW9ZEmj64xr.png,3180.0,Manuel Romero,mrm8488,844012818399912,"[{'type': 'text', 'value': 'Hello world! 🔥', 'raw': 'Hello world! 🔥'}]",Hello world! 🔥,[],[],"[{'reaction': '🤗', 'users': ['radames', 'DmitryRyumin', 'weizhey', 'osanseviero', 'stefan-it', 'apol', 'taesiri', 'ZennyKenny', 'den0620', 'samusenps', 'edeani', 'rahmanansari', 'iv7dev', 'Nacholmo', 'erickdp', 'EddyGiusepe', 'jbilcke-hf', 'leegao19', 'Noomam'], 'count': 19}, {'reaction': '👍', 'users': ['rtscott2001', 'edeani', 'CreitinGameplays', 'jbilcke-hf'], 'count': 4}, {'reaction': '🤝', 'users': ['junaid1993', 'jbilcke-hf'], 'count': 2}]",2024-03-06 20:39:40,2024-03-06 20:39:40.862,[],/posts/mrm8488/844012818399912,2272,,0 +/avatars/89f118f880cce3d01658b123bbbf4402.svg,2.0,Zhiwei Liu,jimzhiwei,289347657662488,"[{'type': 'text', 'value': 'Sharing our paper and library for building LLM agent. The library is less than 1K code lines!', 'raw': 'Sharing our paper and library for building LLM agent. The library is less than 1K code lines!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/SalesforceAIResearch/AgentLite', 'raw': 'https://github.com/SalesforceAIResearch/AgentLite'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://arxiv.org/abs/2402.15538', 'raw': 'https://arxiv.org/abs/2402.15538'}, {'type': 'new_line', 'raw': '\n'}]","Sharing our paper and library for building LLM agent. The library is less than 1K code lines! 
+https://github.com/SalesforceAIResearch/AgentLite +https://arxiv.org/abs/2402.15538 +",[],[],"[{'reaction': '❤️', 'users': ['clem', 'osanseviero', 'taesiri', 'qgao007', 'victor', 'samusenps', 'kumarh1982', 'Hanyu66', 'theArif'], 'count': 9}]",2024-03-06 20:33:27,2024-03-06 20:33:27.402,[],/posts/jimzhiwei/289347657662488,23,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/63040a870547362a22a78ff2/LSwmjTH1UCndb2IbfNVFz.jpeg,4.0,Artan Salihu,Artan,170404494417782,"[{'type': 'text', 'value': '🤗', 'raw': '🤗'}]",🤗,[],[],"[{'reaction': '🤗', 'users': ['osanseviero', 'Suparious', 'samusenps', 'dashfunnydashdash', 'ZennyKenny', 'thomwolf', 'Lewdiculous', 'victor'], 'count': 8}, {'reaction': '🤝', 'users': ['victor'], 'count': 1}]",2024-03-06 20:27:33,2024-03-06 20:27:33.202,[],/posts/Artan/170404494417782,44,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,475713755264649,"[{'type': 'text', 'value': ""Where I work, we are obsessed with what happens to a model's performance after it has been deployed. We call this post-deployment data science."", 'raw': ""Where I work, we are obsessed with what happens to a model's performance after it has been deployed. We call this post-deployment data science.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Let me tell you about a post-deployment data science algorithm that we recently developed to measure the impact of Concept Drift on a model's performance."", 'raw': ""Let me tell you about a post-deployment data science algorithm that we recently developed to measure the impact of Concept Drift on a model's performance.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How can we detect Concept Drift? 🤔', 'raw': 'How can we detect Concept Drift? 🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""All ML models are designed to do one thing: learning a probability distribution in the form of P(y|X). In other words, they try to learn how to model an outcome 'y' given the input variables 'X'. 🧠"", 'raw': ""All ML models are designed to do one thing: learning a probability distribution in the form of P(y|X). In other words, they try to learn how to model an outcome 'y' given the input variables 'X'. 🧠""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This probability distribution, P(y|X), is also called Concept. Therefore, if the Concept changes, the model may become invalid.', 'raw': 'This probability distribution, P(y|X), is also called Concept. 
Therefore, if the Concept changes, the model may become invalid.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '❓But how do we know if there is a new Concept in our data?', 'raw': '❓But how do we know if there is a new Concept in our data?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""❓Or, more important, how do we measure if the new Concept is affecting the model's performance?"", 'raw': ""❓Or, more important, how do we measure if the new Concept is affecting the model's performance?""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""💡 We came up with a clever solution where the main ingredients are a reference dataset, one where the model's performance is known, and a dataset with the latest data we would like to monitor."", 'raw': ""💡 We came up with a clever solution where the main ingredients are a reference dataset, one where the model's performance is known, and a dataset with the latest data we would like to monitor.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👣 Step-by-Step solution:', 'raw': '👣 Step-by-Step solution:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ We start by training an internal model on a chunk of the latest data. ➡️ This allows us to learn the new possible Concept presented in the data.', 'raw': '1️⃣ We start by training an internal model on a chunk of the latest data. ➡️ This allows us to learn the new possible Concept presented in the data.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Next, we use the internal model to make predictions on the reference dataset.', 'raw': '2️⃣ Next, we use the internal model to make predictions on the reference dataset.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""3️⃣ We then estimate the model's performance on the reference dataset, assuming the model's predictions on the monitoring data as ground truth."", 'raw': ""3️⃣ We then estimate the model's performance on the reference dataset, assuming the model's predictions on the monitoring data as ground truth.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4️⃣ If the estimated performance of the internal model and the actual monitored model are very different, we then say that there has been a Concept Drift.', 'raw': '4️⃣ If the estimated performance of the internal model and the actual monitored model are very different, we then say that there has been a Concept Drift.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""To quantify how this Concept impacts performance, we subtract the actual model's performance on reference from the estimated performance and report a delta of the performance metric. ➡️ This is what the plot below shows. The change of the F1-score due to Concept drift! 🚨"", 'raw': ""To quantify how this Concept impacts performance, we subtract the actual model's performance on reference from the estimated performance and report a delta of the performance metric. ➡️ This is what the plot below shows. The change of the F1-score due to Concept drift! 🚨""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This process is repeated for every new chunk of data that we get. 
🔁', 'raw': 'This process is repeated for every new chunk of data that we get. 🔁'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Where I work, we are obsessed with what happens to a model's performance after it has been deployed. We call this post-deployment data science. + +Let me tell you about a post-deployment data science algorithm that we recently developed to measure the impact of Concept Drift on a model's performance. + +How can we detect Concept Drift? 🤔 + +All ML models are designed to do one thing: learning a probability distribution in the form of P(y|X). In other words, they try to learn how to model an outcome 'y' given the input variables 'X'. 🧠 + +This probability distribution, P(y|X), is also called Concept. Therefore, if the Concept changes, the model may become invalid. + +❓But how do we know if there is a new Concept in our data? +❓Or, more important, how do we measure if the new Concept is affecting the model's performance? + +💡 We came up with a clever solution where the main ingredients are a reference dataset, one where the model's performance is known, and a dataset with the latest data we would like to monitor. + +👣 Step-by-Step solution: + +1️⃣ We start by training an internal model on a chunk of the latest data. ➡️ This allows us to learn the new possible Concept presented in the data. + +2️⃣ Next, we use the internal model to make predictions on the reference dataset. + +3️⃣ We then estimate the model's performance on the reference dataset, assuming the model's predictions on the monitoring data as ground truth. + +4️⃣ If the estimated performance of the internal model and the actual monitored model are very different, we then say that there has been a Concept Drift. + +To quantify how this Concept impacts performance, we subtract the actual model's performance on reference from the estimated performance and report a delta of the performance metric. ➡️ This is what the plot below shows. The change of the F1-score due to Concept drift! 🚨 + +This process is repeated for every new chunk of data that we get. 🔁 + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/1YRx7NALfSIm0fHozah8B.jpeg'}]",[],"[{'reaction': '👍', 'users': ['ajibawa-2023', 'fblgit', 'jessicagab', 'Nhebo', 'victor', 'vishwask', 'codito', 'anujd9'], 'count': 8}, {'reaction': '❤️', 'users': ['gsarti', 'clem', 'dlicari', 'samusenps', 'Kukedlc'], 'count': 5}]",2024-02-29 08:13:52,2024-02-29 08:13:52.488,[],/posts/santiviquez/475713755264649,31,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png,5900.0,Joshua,Xenova,804343794091633,"[{'type': 'text', 'value': 'Real-time object detection w/ 🤗 Transformers.js, running YOLOv9 locally in your browser! 🤯', 'raw': 'Real-time object detection w/ 🤗 Transformers.js, running YOLOv9 locally in your browser! 
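The step-by-step concept-drift recipe in the post above maps almost directly onto scikit-learn. Below is a minimal sketch of the idea, not the author's production implementation: train an internal model on the latest chunk, score it on the reference set, and report the F1 delta against the monitored model's known reference F1. The classifier choice, binary labels, and label availability on the chunk are illustrative assumptions.

```python
# Sketch of the concept-drift performance delta described above (illustrative,
# not the authors' code). The internal model captures the latest chunk's
# P(y|X); its reference-set F1 is compared to the monitored model's known F1.
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import f1_score

def concept_drift_delta(X_ref, y_ref, monitored_ref_f1, X_chunk, y_chunk):
    # 1) Learn the (possibly new) concept from the latest chunk of data.
    internal = RandomForestClassifier(n_estimators=100, random_state=0)
    internal.fit(X_chunk, y_chunk)

    # 2-3) Use the internal model to make predictions on the reference
    # dataset and estimate its performance there (binary F1 assumed).
    estimated_f1 = f1_score(y_ref, internal.predict(X_ref))

    # 4) A large gap between estimated and actual reference F1 signals that
    # the concept has drifted; the delta quantifies the metric impact.
    return estimated_f1 - monitored_ref_f1
```

Calling this for every incoming chunk traces the F1-change curve shown in the post's plot.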
🤯'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try it out yourself: ', 'raw': 'Try it out yourself: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'Xenova/video-object-detection'}, 'url': 'https://huggingface.co/spaces/Xenova/video-object-detection', 'raw': 'https://huggingface.co/spaces/Xenova/video-object-detection'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(Model used + example code: ', 'raw': '(Model used + example code: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'Xenova/gelan-c_all'}, 'url': 'https://huggingface.co/Xenova/gelan-c_all', 'raw': 'https://huggingface.co/Xenova/gelan-c_all'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This demo shows why on-device ML is so important:', 'raw': 'This demo shows why on-device ML is so important:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Privacy - local inference means no user data is sent to the cloud', 'raw': '1. Privacy - local inference means no user data is sent to the cloud'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. No server latency - empowers developers to build real-time applications', 'raw': '2. No server latency - empowers developers to build real-time applications'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Lower costs - no need to pay for bandwidth and processing of streamed video', 'raw': '3. Lower costs - no need to pay for bandwidth and processing of streamed video'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I can't wait to see what you build with it! 🔥"", 'raw': ""I can't wait to see what you build with it! 🔥""}]","Real-time object detection w/ 🤗 Transformers.js, running YOLOv9 locally in your browser! 🤯 + +Try it out yourself: https://huggingface.co/spaces/Xenova/video-object-detection +(Model used + example code: https://huggingface.co/Xenova/gelan-c_all) + +This demo shows why on-device ML is so important: +1. Privacy - local inference means no user data is sent to the cloud +2. No server latency - empowers developers to build real-time applications +3. Lower costs - no need to pay for bandwidth and processing of streamed video + +I can't wait to see what you build with it! 
🔥","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61b253b7ac5ecaae3d1efe0c/xpvEiQ7odTiFGOQluLaJg.mp4'}]",[],"[{'reaction': '❤️', 'users': ['DmitryRyumin', 'macadeliccc', 'giux78', 'radames', 'ajibawa-2023', 'osanseviero', 'Dlbk', 'jaickerag', 'victor', 'mvaloatto', 'clem', 'samusenps', 'bwang0911', 'Thepickledegg', 'hogunkim', 'felixdrp', 'Noomam', 'Youngwon', 'elcrei', 'ackinc'], 'count': 20}, {'reaction': '👍', 'users': ['jaickerag', 'mrkbac', 'yxxsgdmn', 'JoPmt', 'elcrei'], 'count': 5}]",2024-02-28 18:41:21,2024-03-04 13:04:38.304,"[{'_id': '6492ab67354ac5752a2754a3', 'avatarUrl': '/avatars/9d6050996fc440ad6693bc05087d66d8.svg', 'fullname': 'Rahul Atlury', 'name': 'atlury', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5, 'isFollowing': False}, {'_id': '61b253b7ac5ecaae3d1efe0c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png', 'fullname': 'Joshua', 'name': 'Xenova', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5900, 'isFollowing': False}]",/posts/Xenova/804343794091633,3538,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,591814698484680,"[{'type': 'text', 'value': 'I have dedicated several days, working over 12 hours each day, on SUPIR (Scaling-UP Image Restoration), a cutting-edge image enhancement and upscaling model introduced in the paper Scaling Up to Excellence: Practicing Model Scaling for Photo-Realistic Image Restoration In the Wild.', 'raw': 'I have dedicated several days, working over 12 hours each day, on SUPIR (Scaling-UP Image Restoration), a cutting-edge image enhancement and upscaling model introduced in the paper Scaling Up to Excellence: Practicing Model Scaling for Photo-Realistic Image Restoration In the Wild.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This model is simply mind-blowing. At the bottom of this post, you will see side-by-side comparisons of SUPIR versus the extremely expensive online service, Magnific AI. Magnific is known to be the best among the community. However, SUPIR is by far superior. SUPIR also significantly outperforms Topaz AI upscale. SUPIR manages to remain faithful to the original image almost 100% while adding details and achieving super upscaling with the best realism.', 'raw': 'This model is simply mind-blowing. At the bottom of this post, you will see side-by-side comparisons of SUPIR versus the extremely expensive online service, Magnific AI. Magnific is known to be the best among the community. However, SUPIR is by far superior. SUPIR also significantly outperforms Topaz AI upscale. 
SUPIR manages to remain faithful to the original image almost 100% while adding details and achieving super upscaling with the best realism.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can read the full blog post here : ', 'raw': 'You can read the full blog post here : '}, {'type': 'link', 'href': 'https://huggingface.co/blog/MonsterMMORPG/supir-sota-image-upscale-better-than-magnific-ai', 'raw': 'https://huggingface.co/blog/MonsterMMORPG/supir-sota-image-upscale-better-than-magnific-ai'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","I have dedicated several days, working over 12 hours each day, on SUPIR (Scaling-UP Image Restoration), a cutting-edge image enhancement and upscaling model introduced in the paper Scaling Up to Excellence: Practicing Model Scaling for Photo-Realistic Image Restoration In the Wild. + +This model is simply mind-blowing. At the bottom of this post, you will see side-by-side comparisons of SUPIR versus the extremely expensive online service, Magnific AI. Magnific is known to be the best among the community. However, SUPIR is by far superior. SUPIR also significantly outperforms Topaz AI upscale. SUPIR manages to remain faithful to the original image almost 100% while adding details and achieving super upscaling with the best realism. + +You can read the full blog post here : https://huggingface.co/blog/MonsterMMORPG/supir-sota-image-upscale-better-than-magnific-ai + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6345bd89fe134dfd7a0dba40/8bCg_0TAP7o-thVnRO3Lp.png'}]",[],"[{'reaction': '👍', 'users': ['ameerazam08', 'clem', 'iDrops', 'samusenps', 'diogofranciscop', 'yxxsgdmn', 'Johnnycadelover'], 'count': 7}, {'reaction': '🤯', 'users': ['adamelliotfields'], 'count': 1}, {'reaction': '🔥', 'users': ['Jwjjwk'], 'count': 1}]",2024-02-28 17:42:02,2024-02-29 16:00:16.243,"[{'_id': '6266513d539521e602b5dc3a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6266513d539521e602b5dc3a/NsKi_PW21BkNq5F-TdnHt.png', 'fullname': 'Ameer Azam', 'name': 'ameerazam08', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 125, 'isFollowing': False}, {'_id': '6345bd89fe134dfd7a0dba40', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg', 'fullname': 'Furkan Gözükara', 'name': 'MonsterMMORPG', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 646, 'isFollowing': False}, {'_id': '64aea8ff67511bd3d965697b', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64aea8ff67511bd3d965697b/Jxn52EmDF5RApJh8antxn.jpeg', 'fullname': 'Feynman Innovations', 'name': 'ajibawa-2023', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 161, 'isFollowing': False}]",/posts/MonsterMMORPG/591814698484680,515,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1647276617786-5f04e8865d08220171a0ad3f.png,48.0,François Remy,FremyCompany,650521542566757,"[{'type': 'text', 'value': ""🔥 What's that biomedical model that got 170,763 downloads last month on HuggingFace?! Well, the paper is finally published! #BioLORD"", 'raw': ""🔥 What's that biomedical model that got 170,763 downloads last month on HuggingFace?! Well, the paper is finally published! 
#BioLORD""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📰 Read our article in the Journal of the American Medical Informatics Association:', 'raw': '📰 Read our article in the Journal of the American Medical Informatics Association:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://academic.oup.com/jamia/advance-article/doi/10.1093/jamia/ocae029/7614965', 'raw': 'https://academic.oup.com/jamia/advance-article/doi/10.1093/jamia/ocae029/7614965'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📝', 'raw': '📝'}, {'type': 'inline_code', 'code': 'TLDR:', 'raw': '`TLDR:`'}, {'type': 'text', 'value': "" BioLORD-2023 is a series of semantic language models for the biomedical domain, capable of representing clinical concepts and sentences in a semantic space aligned with human preferences. Our new multilingual version supports 50+ languages and is further finetuned on 7 European languages. These models were trained contrastively and through distillations, using a corpus unifying in the same latent space the concept names of biomedical concepts and their descriptions. For concepts which didn't have a description written by humans in UMLS, we use information contained in the SnomedCT knowledge graph and the capabilities of ChatGPT to generate synthetic data and improve our results."", 'raw': "" BioLORD-2023 is a series of semantic language models for the biomedical domain, capable of representing clinical concepts and sentences in a semantic space aligned with human preferences. Our new multilingual version supports 50+ languages and is further finetuned on 7 European languages. These models were trained contrastively and through distillations, using a corpus unifying in the same latent space the concept names of biomedical concepts and their descriptions. 
For concepts which didn't have a description written by humans in UMLS, we use information contained in the SnomedCT knowledge graph and the capabilities of ChatGPT to generate synthetic data and improve our results.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤗 Access our models from the HuggingFace hub, including the new 2023-C and 2023-S variants:', 'raw': '🤗 Access our models from the HuggingFace hub, including the new 2023-C and 2023-S variants:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'FremyCompany/BioLORD-2023'}, 'url': 'https://huggingface.co/FremyCompany/BioLORD-2023', 'raw': 'https://huggingface.co/FremyCompany/BioLORD-2023'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'FremyCompany/BioLORD-2023-M'}, 'url': 'https://huggingface.co/FremyCompany/BioLORD-2023-M', 'raw': 'https://huggingface.co/FremyCompany/BioLORD-2023-M'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'FremyCompany/BioLORD-2023-S'}, 'url': 'https://huggingface.co/FremyCompany/BioLORD-2023-S', 'raw': 'https://huggingface.co/FremyCompany/BioLORD-2023-S'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'FremyCompany/BioLORD-2023-C'}, 'url': 'https://huggingface.co/FremyCompany/BioLORD-2023-C', 'raw': 'https://huggingface.co/FremyCompany/BioLORD-2023-C'}, {'type': 'new_line', 'raw': '\n'}]","🔥 What's that biomedical model that got 170,763 downloads last month on HuggingFace?! Well, the paper is finally published! #BioLORD + +📰 Read our article in the Journal of the American Medical Informatics Association: +https://academic.oup.com/jamia/advance-article/doi/10.1093/jamia/ocae029/7614965 + +📝`TLDR:` BioLORD-2023 is a series of semantic language models for the biomedical domain, capable of representing clinical concepts and sentences in a semantic space aligned with human preferences. Our new multilingual version supports 50+ languages and is further finetuned on 7 European languages. These models were trained contrastively and through distillations, using a corpus unifying in the same latent space the concept names of biomedical concepts and their descriptions. For concepts which didn't have a description written by humans in UMLS, we use information contained in the SnomedCT knowledge graph and the capabilities of ChatGPT to generate synthetic data and improve our results. 
+ +🤗 Access our models from the HuggingFace hub, including the new 2023-C and 2023-S variants: +https://huggingface.co/FremyCompany/BioLORD-2023 +https://huggingface.co/FremyCompany/BioLORD-2023-M +https://huggingface.co/FremyCompany/BioLORD-2023-S +https://huggingface.co/FremyCompany/BioLORD-2023-C +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f04e8865d08220171a0ad3f/sK22MD2CCD2lDhajQoaRV.png'}]",[],"[{'reaction': '👍', 'users': ['ilGawo', 'cdevelder', 'osanseviero', 'katielink', 'julien-c', 'clem', 'macadeliccc', 'UMCU', 'danielhanchen', 'samusenps', 'yxxsgdmn', 'pdelobelle'], 'count': 12}, {'reaction': '❤️', 'users': ['clem', 'Concor', 'drak-hf', 'samusenps'], 'count': 4}]",2024-02-28 15:25:32,2024-02-29 14:59:45.605,"[{'_id': '618b9a79ba796dc2bf3f4412', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/618b9a79ba796dc2bf3f4412/66ustwEp1EDU_rxcXJBIO.jpeg', 'fullname': 'Bram van Es', 'name': 'UMCU', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12, 'isFollowing': False}]",/posts/FremyCompany/650521542566757,827,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,987671089789196,"[{'type': 'text', 'value': 'The Era of 1-bit LLMs: All Large Language Models are in 1.58 Bits', 'raw': 'The Era of 1-bit LLMs: All Large Language Models are in 1.58 Bits'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.17764'}, 'url': 'https://huggingface.co/papers/2402.17764', 'raw': 'https://huggingface.co/papers/2402.17764', 'label': 'The Era of 1-bit LLMs: All Large Language Models are in 1.58 Bits (2402.17764)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Recent research, such as BitNet, is paving the way for a new era of 1-bit Large Language Models (LLMs). In this work, we introduce a 1-bit LLM variant, namely BitNet b1.58, in which every single parameter (or weight) of the LLM is ternary {-1, 0, 1}. It matches the full-precision (i.e., FP16 or BF16) Transformer LLM with the same model size and training tokens in terms of both perplexity and end-task performance, while being significantly more cost-effective in terms of latency, memory, throughput, and energy consumption. More profoundly, the 1.58-bit LLM defines a new scaling law and recipe for training new generations of LLMs that are both high-performance and cost-effective. Furthermore, it enables a new computation paradigm and opens the door for designing specific hardware optimized for 1-bit LLMs.', 'raw': 'Recent research, such as BitNet, is paving the way for a new era of 1-bit Large Language Models (LLMs). In this work, we introduce a 1-bit LLM variant, namely BitNet b1.58, in which every single parameter (or weight) of the LLM is ternary {-1, 0, 1}. It matches the full-precision (i.e., FP16 or BF16) Transformer LLM with the same model size and training tokens in terms of both perplexity and end-task performance, while being significantly more cost-effective in terms of latency, memory, throughput, and energy consumption. More profoundly, the 1.58-bit LLM defines a new scaling law and recipe for training new generations of LLMs that are both high-performance and cost-effective. 
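As a quick usage sketch for the BioLORD-2023 models listed above: since they are described as semantic models for clinical concepts and sentences, a `sentence-transformers` interface is assumed here (the model card is authoritative).

```python
# Assumed usage sketch: embed biomedical phrases with BioLORD-2023 via
# sentence-transformers; verify compatibility on the model card.
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("FremyCompany/BioLORD-2023")

phrases = ["myocardial infarction", "heart attack", "fracture of the femur"]
embeddings = model.encode(phrases)

# Clinically equivalent concepts should land close together in the space.
print(util.cos_sim(embeddings[0], embeddings[1]))  # MI vs. heart attack: high
print(util.cos_sim(embeddings[0], embeddings[2]))  # MI vs. femur fracture: lower
```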
Furthermore, it enables a new computation paradigm and opens the door for designing specific hardware optimized for 1-bit LLMs.'}]","The Era of 1-bit LLMs: All Large Language Models are in 1.58 Bits + +https://huggingface.co/papers/2402.17764 + +Recent research, such as BitNet, is paving the way for a new era of 1-bit Large Language Models (LLMs). In this work, we introduce a 1-bit LLM variant, namely BitNet b1.58, in which every single parameter (or weight) of the LLM is ternary {-1, 0, 1}. It matches the full-precision (i.e., FP16 or BF16) Transformer LLM with the same model size and training tokens in terms of both perplexity and end-task performance, while being significantly more cost-effective in terms of latency, memory, throughput, and energy consumption. More profoundly, the 1.58-bit LLM defines a new scaling law and recipe for training new generations of LLMs that are both high-performance and cost-effective. Furthermore, it enables a new computation paradigm and opens the door for designing specific hardware optimized for 1-bit LLMs.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/ZXjQ-1EnsQ-6FWoDu7JOR.png'}]",[],"[{'reaction': '👍', 'users': ['Azamat1k', 'bofenghuang', 'osanseviero', 'clem', 'macadeliccc', 'ssone95', 'minjejeon', 'SRDdev', 'arcdyn', 'ajibawa-2023', 'Kukedlc', 'yongzx', 'eramax', 'mvaloatto', 'kgourgou', 'danielhanchen', 'arjunsriva', 'goncharenko', 'notune', 'koflerdavid', 'krishnapraveen', 'Banshal', 'Hemanth-thunder', 'Aishou', 'northern-64bit', 'tuanlda78202', 'andrewrreed', 'CarlLee', 'rippertnt', 'joecrypto', 'feveromo', 'mathiasn1', 'RachidAR', 'SaiNikhileshReddy', 'yxxsgdmn', 'julien-c', 'femboysLover', 'victor', 'wath5'], 'count': 39}, {'reaction': '❤️', 'users': ['clem', 'macadeliccc', 'giux78', 'ssone95', 'fblgit', 'adhisetiawan', 'tuanlda78202', 'samusenps', 'mindrage', 'Parth', 'SaiNikhileshReddy', 'julien-c', 'ncard'], 'count': 13}]",2024-02-28 14:39:57,2024-02-28 14:39:57.255,[],/posts/akhaliq/987671089789196,269,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61c141342aac764ce1654e43/81AwoT5IQ_Xdw0OVw7TKu.jpeg,3482.0,Loubna Ben Allal,loubnabnl,596860170283496,"[{'type': 'text', 'value': '⭐ Today we're releasing The Stack v2 & StarCoder2: a series of 3B, 7B & 15B code generation models trained on 3.3 to 4.5 trillion tokens of code: ', 'raw': '⭐ Today we're releasing The Stack v2 & StarCoder2: a series of 3B, 7B & 15B code generation models trained on 3.3 to 4.5 trillion tokens of code: '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- StarCoder2-15B matches or outperforms CodeLlama 34B, and approaches DeepSeek-33B on multiple benchmarks.', 'raw': '- StarCoder2-15B matches or outperforms CodeLlama 34B, and approaches DeepSeek-33B on multiple benchmarks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- StarCoder2-3B outperforms StarCoderBase-15B and similar sized models.', 'raw': '- StarCoder2-3B outperforms StarCoderBase-15B and similar sized models.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- The Stack v2 is a 4x larger dataset than the Stack v1, resulting in 900B unique code tokens 🚀', 'raw': '- The Stack v2 is a 4x larger dataset than the Stack v1, resulting in 900B unique code tokens 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As always, we released everything from models and datasets to curation code. 
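To make the ternary-weight idea in the 1-bit LLM abstract above concrete, here is a small sketch of the absmean quantization the paper describes: scale weights by their mean absolute value, then round and clip to {-1, 0, 1}. Details such as scaling granularity and activation handling follow my reading of the paper and may differ from the reference implementation.

```python
# Sketch of BitNet b1.58-style ternary quantization: every weight becomes
# -1, 0, or 1 after scaling by the tensor's mean absolute value (absmean).
import torch

def absmean_ternary(w: torch.Tensor, eps: float = 1e-5):
    scale = w.abs().mean().clamp(min=eps)  # absmean scale (gamma)
    q = (w / scale).round().clamp(-1, 1)   # RoundClip to {-1, 0, 1}
    return q, scale                        # dequantize as q * scale

w = torch.randn(4, 4)
q, scale = absmean_ternary(w)
print(q)          # entries are only -1., 0., or 1.
print(q * scale)  # coarse reconstruction of the original weights
```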
Enjoy!', 'raw': 'As always, we released everything from models and datasets to curation code. Enjoy!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 StarCoder2 collection: ', 'raw': '🔗 StarCoder2 collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'bigcode/starcoder2-65de6da6e87db3383572be1a'}, 'url': 'https://huggingface.co/collections/bigcode/starcoder2-65de6da6e87db3383572be1a', 'raw': 'https://huggingface.co/collections/bigcode/starcoder2-65de6da6e87db3383572be1a'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'link', 'href': 'https://drive.google.com/file/d/17iGn3c-sYNiLyRSY-A85QOzgzGnGiVI3/view', 'raw': 'https://drive.google.com/file/d/17iGn3c-sYNiLyRSY-A85QOzgzGnGiVI3/view'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 BlogPost: ', 'raw': '🔗 BlogPost: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/starcoder2', 'raw': 'https://huggingface.co/blog/starcoder2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Code Leaderboard: ', 'raw': '🔗 Code Leaderboard: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'bigcode/bigcode-models-leaderboard'}, 'url': 'https://huggingface.co/spaces/bigcode/bigcode-models-leaderboard', 'raw': 'https://huggingface.co/spaces/bigcode/bigcode-models-leaderboard'}]","⭐ Today we're releasing The Stack v2 & StarCoder2: a series of 3B, 7B & 15B code generation models trained on 3.3 to 4.5 trillion tokens of code: + +- StarCoder2-15B matches or outperforms CodeLlama 34B, and approaches DeepSeek-33B on multiple benchmarks. +- StarCoder2-3B outperforms StarCoderBase-15B and similar sized models. +- The Stack v2 is a 4x larger dataset than the Stack v1, resulting in 900B unique code tokens 🚀 +As always, we released everything from models and datasets to curation code. Enjoy!
+ +🔗 StarCoder2 collection: https://huggingface.co/collections/bigcode/starcoder2-65de6da6e87db3383572be1a +🔗 Paper: https://drive.google.com/file/d/17iGn3c-sYNiLyRSY-A85QOzgzGnGiVI3/view +🔗 BlogPost: https://huggingface.co/blog/starcoder2 +🔗 Code Leaderboard: https://huggingface.co/spaces/bigcode/bigcode-models-leaderboard","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61c141342aac764ce1654e43/xgxe4cZjwUQL-qH9yS9Of.png'}]",[],"[{'reaction': '🤗', 'users': ['lvwerra', 'DmitryRyumin', 'thomwolf', 'olivierdehaene', 'smangrul', 'osanseviero', 'mcpotato', 'alielfilali01', 'SivilTaram', 'victor', 'yuxiang630', 'BrigitteTousi', 'VictorSanh', 'harmdevries', 'RaymondLi', '3outeille', 'Azamat1k', 'vladbogo', 'bofenghuang', 'ncoop57', 'philschmid', 'Avremi', 'clem', 'admarcosai', 'macadeliccc', 'mayank-mishra', 'hysts', 'kramp', 'nouamanetazi', 'danielhanchen', 'goncharenko', 'MoritzLaurer', 'tbeck', 'not-lain', 'amyeroberts', 'andrewrreed', 'yxxsgdmn', 'seyf1elislam', 'omaryshchenko', 'mishig'], 'count': 40}, {'reaction': '❤️', 'users': ['lvwerra', 'thomwolf', 'olivierdehaene', 'smangrul', 'osanseviero', 'mcpotato', 'alielfilali01', 'SivilTaram', 'victor', 'yuxiang630', 'BrigitteTousi', 'VictorSanh', 'harmdevries', 'RaymondLi', '3outeille', 'ncoop57', 'philschmid', 'clem', 'admarcosai', 'macadeliccc', 'EdoAbati', 'euclaise', 'ajibawa-2023', 'mayank-mishra', 'dalraf', 'nouamanetazi', 'tbeck', 'not-lain', 'amyeroberts', 'andrewrreed', 'samusenps', 'seyf1elislam', 'arjunguha', 'mishig', 'Ramikan-BR'], 'count': 35}, {'reaction': '🤯', 'users': ['lvwerra', 'thomwolf', 'olivierdehaene', 'smangrul', 'osanseviero', 'SivilTaram', 'victor', 'yuxiang630', 'BrigitteTousi', 'RaymondLi', '3outeille', 'hlarcher', 'OmBenz', 'philschmid', 'Abderrazak', 'clem', 'admarcosai', 'mayank-mishra', 'not-lain', 'mishig'], 'count': 20}, {'reaction': '🚀', 'users': ['mishig'], 'count': 1}]",2024-02-28 14:18:14,2024-02-28 14:21:17.956,[],/posts/loubnabnl/596860170283496,1354,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,146398971160140,"[{'type': 'text', 'value': '🌟🎭✨ Exciting News! The Latest in Expressive Video Portrait Generation! 🌟🎭✨', 'raw': '🌟🎭✨ Exciting News! The Latest in Expressive Video Portrait Generation! 
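A minimal smoke test of the StarCoder2 release above with `transformers`; the 3B checkpoint keeps memory needs modest, and the repo id is assumed from the bigcode collection linked in the post (worth double-checking there).

```python
# Quick generation test with StarCoder2-3B; requires transformers (and
# accelerate for device_map="auto"). Repo id assumed from the bigcode org.
from transformers import AutoModelForCausalLM, AutoTokenizer

checkpoint = "bigcode/starcoder2-3b"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map="auto")

inputs = tokenizer("def fibonacci(n):", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=48)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```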
🌟🎭✨'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: EMO: Emote Portrait Alive - Generating Expressive Portrait Videos with Audio2Video Diffusion Model under Weak Conditions', 'raw': '📄 Title: EMO: Emote Portrait Alive - Generating Expressive Portrait Videos with Audio2Video Diffusion Model under Weak Conditions'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: Linrui Tian, ', 'raw': '👥 Authors: Linrui Tian, '}, {'type': 'mention', 'user': 'lucaskingjade', 'raw': '@lucaskingjade'}, {'type': 'text', 'value': ', Bang Zhang, and ', 'raw': ', Bang Zhang, and '}, {'type': 'mention', 'user': 'Liefeng', 'raw': '@Liefeng'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.17485'}, 'url': 'https://huggingface.co/papers/2402.17485', 'raw': 'https://huggingface.co/papers/2402.17485', 'label': 'EMO: Emote Portrait Alive - Generating Expressive Portrait Videos with\n Audio2Video Diffusion Model under Weak Conditions (2402.17485)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Github Page: ', 'raw': '🔗 Github Page: '}, {'type': 'link', 'href': 'https://humanaigc.github.io/emote-portrait-alive', 'raw': 'https://humanaigc.github.io/emote-portrait-alive'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Repository: ', 'raw': '🔗 Repository: '}, {'type': 'link', 'href': 'https://github.com/HumanAIGC/EMO', 'raw': 'https://github.com/HumanAIGC/EMO'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #EMO #EmotePortrait #Audio2VideoDiffusion #ExpressiveAnimations #VideoGeneration #DigitalArt #HumanExpression #ComputerVision #DeepLearning #AI', 'raw': '🔍 Keywords: #EMO #EmotePortrait #Audio2VideoDiffusion #ExpressiveAnimations #VideoGeneration #DigitalArt #HumanExpression #ComputerVision #DeepLearning #AI'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🚀 Added to the Avatars Collection: ', 'raw': '🚀 Added to the Avatars Collection: '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}, 'url': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36', 'raw': 'https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36'}]","🌟🎭✨ Exciting News! The Latest in Expressive Video Portrait Generation! 
🌟🎭✨ + +📄 Title: EMO: Emote Portrait Alive - Generating Expressive Portrait Videos with Audio2Video Diffusion Model under Weak Conditions + +👥 Authors: Linrui Tian, @lucaskingjade, Bang Zhang, and @Liefeng + +🔗 Paper: https://huggingface.co/papers/2402.17485 +🔗 Github Page: https://humanaigc.github.io/emote-portrait-alive +🔗 Repository: https://github.com/HumanAIGC/EMO + +🔍 Keywords: #EMO #EmotePortrait #Audio2VideoDiffusion #ExpressiveAnimations #VideoGeneration #DigitalArt #HumanExpression #ComputerVision #DeepLearning #AI + +🚀 Added to the Avatars Collection: https://huggingface.co/collections/DmitryRyumin/avatars-65df37cdf81fec13d4dbac36","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Ml36qlNX-Fjt-ADSs9kKR.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/A89GsKkmUz-hukkQqY09c.png'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/AhyXqhOqt62tzvwCtfyAg.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/t5HVn8Cbo7A7IhUrhdI4w.mp4'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/FIWZmC2GrP6Em2uUA1eJC.mp4'}]","[{'_id': '63d0cc736b985b0f25d0412c', 'avatarUrl': '/avatars/3eb8c79f9a7c4c819038ea7b04e323dd.svg', 'fullname': 'Bo', 'name': 'Liefeng', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}, {'_id': '65df1f1ee98700500d4c289c', 'avatarUrl': '/avatars/be11bf61465df29ac997cc0fedad1cb9.svg', 'fullname': 'qi wang', 'name': 'lucaskingjade', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '❤️', 'users': ['s3nh', 'osanseviero', 'DmitryRyumin', 'Azamat1k', 'aloobun', 'sachaarbonel', 'plmsmile', 'ajibawa-2023', 'victor', 'mvaloatto', 'Hemanth-thunder', 'samusenps', 'CedrickChu', 'thomwolf', 'newxuyangcao', 'oviniciusfeitosa', 'brunnacroches'], 'count': 17}, {'reaction': '🤯', 'users': ['MalikIbrar', 'fffiloni', 'dkyazze'], 'count': 3}, {'reaction': '🤗', 'users': ['MalikIbrar', 'thomwolf', 'yxxsgdmn'], 'count': 3}, {'reaction': '🚀', 'users': ['brunnacroches'], 'count': 1}]",2024-02-28 13:52:43,2025-01-07 22:52:56.938,"[{'_id': '66504e83a120d855a7ff3a22', 'avatarUrl': '/avatars/f4e544fa2bc6b0570b1cb4678a60cf52.svg', 'fullname': 'Fhhdfjiok', 'name': 'Gjokh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/DmitryRyumin/146398971160140,583,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png,273.0,Ali El Filali,alielfilali01,276108444421787,"[{'type': 'text', 'value': 'Super excited to share with you all our latest contribution from 2A2I.', 'raw': 'Super excited to share with you all our latest contribution from 2A2I.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Today we announce : ', 'raw': 'Today we announce : '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': '2A2I/Arabic-OpenHermes-2.5'}, 'url': 'https://huggingface.co/datasets/2A2I/Arabic-OpenHermes-2.5', 'raw': 'https://huggingface.co/datasets/2A2I/Arabic-OpenHermes-2.5'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': 
'\n'}, {'type': 'text', 'value': 'Arabic-OpenHermes-2.5 is simply the translation of the original dataset released by ', 'raw': 'Arabic-OpenHermes-2.5 is simply the translation of the original dataset released by '}, {'type': 'mention', 'user': 'teknium', 'raw': '@teknium'}, {'type': 'text', 'value': ' a couple of months ago! ', 'raw': ' a couple of months ago! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In fact it looks like a simple task! In reality it was quite a laborious job! ', 'raw': 'In fact it looks like a simple task! In reality it was quite a laborious job! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'But thanks to ', 'raw': 'But thanks to '}, {'type': 'mention', 'user': 'maghwa', 'raw': '@maghwa'}, {'type': 'text', 'value': ' & ', 'raw': ' & '}, {'type': 'mention', 'user': 'medmac01', 'raw': '@medmac01'}, {'type': 'text', 'value': ' this dataset managed to see the light today and help create better / more aligned Arabic LLMs in the near future.', 'raw': ' this dataset managed to see the light today and help create better / more aligned Arabic LLMs in the near future.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'If you are interested in joining us and/or helping us, please leave a comment below or visit our HuggingFace Org Card for more details about How/What you can do.', 'raw': 'If you are interested in joining us and/or helping us, please leave a comment below or visit our HuggingFace Org Card for more details about How/What you can do.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'More datasets to come and more models are in the way 🔥', 'raw': 'More datasets to come and more models are in the way 🔥'}, {'type': 'new_line', 'raw': '\n'}]","Super excited to share with you all our latest contribution from 2A2I. + +Today we announce : https://huggingface.co/datasets/2A2I/Arabic-OpenHermes-2.5 + +Arabic-OpenHermes-2.5 is simply the translation of the original dataset released by @teknium a couple of months ago! +In fact it looks like a simple task! In reality it was quite a laborious job! +But thanks to @maghwa & @medmac01 this dataset managed to see the light today and help create better / more aligned Arabic LLMs in the near future. + +If you are interested in joining us and/or helping us, please leave a comment below or visit our HuggingFace Org Card for more details about How/What you can do. 
+ +More datasets to come and more models are in the way 🔥 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626237d9bbcbd1c34f1bb231/tnl-Lil3yRr5wHdcFjWiM.png'}]","[{'_id': '64d5698102e58cc1fdd0b585', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64d5698102e58cc1fdd0b585/LK9iASnZnk6AlL3J5FfWV.png', 'fullname': 'Marwa El Kamil', 'name': 'maghwa', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 41}, {'_id': '640603e2c3ab325efa94bc4a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/640603e2c3ab325efa94bc4a/jBLC7JH2dBAkDHYzFXZmr.jpeg', 'fullname': 'Mohammed Machrouh', 'name': 'medmac01', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 42}, {'_id': '6317aade83d8d2fd903192d9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6317aade83d8d2fd903192d9/erOwgMXc_CZih3uMoyTAp.jpeg', 'fullname': 'Teknium', 'name': 'teknium', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5613}]","[{'reaction': '❤️', 'users': ['medmac01', 'NovoCode', 'osanseviero', 'ajibawa-2023', 'maghwa', 'mvaloatto', 'sayhan', 'samusenps'], 'count': 8}, {'reaction': '🤗', 'users': ['medmac01', 'maghwa', 'sayhan'], 'count': 3}, {'reaction': '👍', 'users': ['alphaprime90'], 'count': 1}]",2024-02-28 13:36:44,2024-02-28 13:38:15.145,[],/posts/alielfilali01/276108444421787,92,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1638132956881-5fca176d1d7a08cb34d79d5d.jpeg,250.0,Sourab Mangrulkar,smangrul,573120738895551,"[{'type': 'text', 'value': '🚨 New Release of 🤗PEFT!', 'raw': '🚨 New Release of 🤗PEFT!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. New methods for merging LoRA weights. Refer this HF Post for more details: ', 'raw': '1. New methods for merging LoRA weights. Refer this HF Post for more details: '}, {'type': 'link', 'href': 'https://huggingface.co/posts/smangrul/850816632583824', 'raw': 'https://huggingface.co/posts/smangrul/850816632583824'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. AWQ and AQLM support for LoRA. You can now:', 'raw': '2. AWQ and AQLM support for LoRA. You can now:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Train adapters on top of 2-bit quantized models with AQLM', 'raw': '- Train adapters on top of 2-bit quantized models with AQLM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Train adapters on top of powerful AWQ quantized models', 'raw': '- Train adapters on top of powerful AWQ quantized models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Note for inference you can't merge the LoRA weights into the base model!"", 'raw': ""Note for inference you can't merge the LoRA weights into the base model!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. DoRA support: Enabling DoRA is as easy as adding ', 'raw': '3. DoRA support: Enabling DoRA is as easy as adding '}, {'type': 'inline_code', 'code': 'use_dora=True', 'raw': '`use_dora=True`'}, {'type': 'text', 'value': ' to your ', 'raw': ' to your '}, {'type': 'inline_code', 'code': 'LoraConfig', 'raw': '`LoraConfig`'}, {'type': 'text', 'value': '. Find out more about this method here: ', 'raw': '. 
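A one-liner sketch for inspecting the translated dataset announced above (the `train` split name is an assumption; the dataset card is authoritative):

```python
# Load and peek at Arabic-OpenHermes-2.5; the "train" split is an assumption.
from datasets import load_dataset

ds = load_dataset("2A2I/Arabic-OpenHermes-2.5", split="train")
print(ds)     # row count and column names
print(ds[0])  # one translated example
```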
Find out more about this method here: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2402.09353', 'raw': 'https://arxiv.org/abs/2402.09353'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. Improved documentation, particularly docs regarding PEFT LoRA+DeepSpeed and PEFT LoRA+FSDP! 📄 Check out the docs at ', 'raw': '4. Improved documentation, particularly docs regarding PEFT LoRA+DeepSpeed and PEFT LoRA+FSDP! 📄 Check out the docs at '}, {'type': 'link', 'href': 'https://huggingface.co/docs/peft/index', 'raw': 'https://huggingface.co/docs/peft/index'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5. Full Release Notes: ', 'raw': '5. Full Release Notes: '}, {'type': 'link', 'href': 'https://github.com/huggingface/peft/releases/tag/v0.9.0', 'raw': 'https://github.com/huggingface/peft/releases/tag/v0.9.0'}, {'type': 'new_line', 'raw': '\n'}]","🚨 New Release of 🤗PEFT! + +1. New methods for merging LoRA weights. Refer to this HF Post for more details: https://huggingface.co/posts/smangrul/850816632583824 + +2. AWQ and AQLM support for LoRA. You can now: +- Train adapters on top of 2-bit quantized models with AQLM +- Train adapters on top of powerful AWQ quantized models +Note: for inference, you can't merge the LoRA weights into the base model! + +3. DoRA support: Enabling DoRA is as easy as adding `use_dora=True` to your `LoraConfig`. Find out more about this method here: https://arxiv.org/abs/2402.09353 + +4. Improved documentation, particularly docs regarding PEFT LoRA+DeepSpeed and PEFT LoRA+FSDP! 📄 Check out the docs at https://huggingface.co/docs/peft/index. + +5. 
Full Release Notes: https://github.com/huggingface/peft/releases/tag/v0.9.0 +",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'Kukedlc', 'clem', 'Ji-Ha', 'samusenps', 'alielfilali01', 'mudogruer', 'ybelkada', 'ryokeken', 'Cstark', 'dahwinsingularity'], 'count': 11}, {'reaction': '👍', 'users': ['NickyNicky', 'Jenish-23', 'ixaxaar', 'ababio', 'rizwan-ai', 'ybelkada', 'Cstark', 'ZennyKenny', 'catastropiyush'], 'count': 9}]",2024-02-28 11:51:55,2024-12-09 11:46:41.689,"[{'_id': '64ec350d2fa1391181d4493c', 'avatarUrl': '/avatars/c02f9f8d27cb83be65dba0c7b945daa4.svg', 'fullname': 'Jenish-23', 'name': 'Jenish-23', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '5fca176d1d7a08cb34d79d5d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1638132956881-5fca176d1d7a08cb34d79d5d.jpeg', 'fullname': 'Sourab Mangrulkar', 'name': 'smangrul', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 250, 'isFollowing': False}, {'_id': '62441d1d9fdefb55a0b7d12c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1648631057413-noauth.png', 'fullname': 'Younes B', 'name': 'ybelkada', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 470, 'isFollowing': False}, {'_id': '626237d9bbcbd1c34f1bb231', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png', 'fullname': 'Ali El Filali', 'name': 'alielfilali01', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 273, 'isFollowing': False}, {'_id': '670f7b4ab7a7aedd4f3334c6', 'avatarUrl': '/avatars/38e83fc39ecc88fb0bc30a6766cf6a94.svg', 'fullname': 'Zaid Ahmad Awan', 'name': 'zaidawan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/smangrul/573120738895551,794,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg,3949.0,Victor Mustar,victor,718473545021746,"[{'type': 'text', 'value': ""🆕 There's now a visible description on Spaces previews. You can set the description of your Spaces by editing their README.md - it should make your Spaces more discoverable 🚀."", 'raw': ""🆕 There's now a visible description on Spaces previews. You can set the description of your Spaces by editing their README.md - it should make your Spaces more discoverable 🚀.""}]",🆕 There's now a visible description on Spaces previews. 
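To make the DoRA point from the PEFT 0.9.0 post above concrete, here is a minimal sketch: the base model name and all LoRA hyperparameters are illustrative assumptions, and only the `use_dora=True` flag on `LoraConfig` comes from the release notes.

```python
# Minimal DoRA fine-tuning setup with PEFT >= 0.9.0. The model name and
# hyperparameters are illustrative; only use_dora=True is from the post.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")

config = LoraConfig(
    r=8,                                  # adapter rank
    lora_alpha=16,                        # scaling factor
    target_modules=["q_proj", "v_proj"],  # which projections get adapters
    lora_dropout=0.05,
    use_dora=True,                        # enable weight-decomposed LoRA (DoRA)
)

model = get_peft_model(base, config)
model.print_trainable_parameters()
```

Everything else about training is unchanged from a plain LoRA run; DoRA only adds a small learned magnitude vector per adapted layer.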
You can set the description of your Spaces by editing their README.md - it should make your Spaces more discoverable 🚀.,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/J0yut5CpC32RTCANJr3lW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f17f0a0925b9863e28ad517/YkegKXOOm5vCrlldfVRXK.png'}]",[],"[{'reaction': '❤️', 'users': ['Felladrin', 'osanseviero', 'mvaloatto', 'clefourrier', 'alielfilali01', 'clem', 'Vokturz', 'samusenps', 'ojasvisingh786'], 'count': 9}, {'reaction': '👍', 'users': ['fffiloni', 'Tonic'], 'count': 2}]",2024-02-28 09:36:53,2024-02-28 12:41:20.677,"[{'_id': '63893d4c184615e463aa24b8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63893d4c184615e463aa24b8/S1flsX_26OF6ZJBVcPlaf.jpeg', 'fullname': 'Matt Valoatto', 'name': 'mvaloatto', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 65, 'isFollowing': False}]",/posts/victor/718473545021746,696,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,894884783923541,"[{'type': 'text', 'value': 'LLM hallucination detection papers be like:', 'raw': 'LLM hallucination detection papers be like:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* check the image to get the joke 👀', 'raw': '* check the image to get the joke 👀'}]","LLM hallucination detection papers be like: + +* check the image to get the joke 👀","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/BI8LV88qlbHuNO0o9He7A.png'}]",[],"[{'reaction': '🤗', 'users': ['fahnub', 'Kukedlc', 'Doma02', 'gsarti', 'sayhan', 'MarinaraSpaghetti', 'vijay8642', 'dashfunnydashdash', 'Vermilion3154', 'Kirkalish', 'notoookay'], 'count': 11}]",2024-02-21 17:26:22,2024-02-21 17:27:22.927,[],/posts/santiviquez/894884783923541,14,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,707975049025193,"[{'type': 'text', 'value': ' Neural Network Diffusion', 'raw': ' Neural Network Diffusion'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.13144'}, 'url': 'https://huggingface.co/papers/2402.13144', 'raw': 'https://huggingface.co/papers/2402.13144', 'label': 'Neural Network Diffusion (2402.13144)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Diffusion models have achieved remarkable success in image and video generation. In this work, we demonstrate that diffusion models can also generate high-performing neural network parameters. Our approach is simple, utilizing an autoencoder and a standard latent diffusion model. The autoencoder extracts latent representations of a subset of the trained network parameters. A diffusion model is then trained to synthesize these latent parameter representations from random noise. It then generates new representations that are passed through the autoencoder's decoder, whose outputs are ready to use as new subsets of network parameters. Across various architectures and datasets, our diffusion process consistently generates models of comparable or improved performance over trained networks, with minimal additional cost. 
Notably, we empirically find that the generated models perform differently from the trained networks. Our results encourage more exploration of the versatile use of diffusion models."", 'raw': ""Diffusion models have achieved remarkable success in image and video generation. In this work, we demonstrate that diffusion models can also generate high-performing neural network parameters. Our approach is simple, utilizing an autoencoder and a standard latent diffusion model. The autoencoder extracts latent representations of a subset of the trained network parameters. A diffusion model is then trained to synthesize these latent parameter representations from random noise. It then generates new representations that are passed through the autoencoder's decoder, whose outputs are ready to use as new subsets of network parameters. Across various architectures and datasets, our diffusion process consistently generates models of comparable or improved performance over trained networks, with minimal additional cost. Notably, we empirically find that the generated models perform differently from the trained networks. Our results encourage more exploration of the versatile use of diffusion models.""}]"," Neural Network Diffusion + +https://huggingface.co/papers/2402.13144 + +Diffusion models have achieved remarkable success in image and video generation. In this work, we demonstrate that diffusion models can also generate high-performing neural network parameters. Our approach is simple, utilizing an autoencoder and a standard latent diffusion model. The autoencoder extracts latent representations of a subset of the trained network parameters. A diffusion model is then trained to synthesize these latent parameter representations from random noise. It then generates new representations that are passed through the autoencoder's decoder, whose outputs are ready to use as new subsets of network parameters. Across various architectures and datasets, our diffusion process consistently generates models of comparable or improved performance over trained networks, with minimal additional cost. Notably, we empirically find that the generated models perform differently from the trained networks. Our results encourage more exploration of the versatile use of diffusion models.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/j72KYwskrHzgqZKJk7RWG.png'}]",[],"[{'reaction': '❤️', 'users': ['osanseviero', 'kgourgou', 'SanRosenberg', 'vladbogo', 'gsarti', 'Salwa-Zeitoun', 'SuperShark', 'takeraparterer', 'DataSoul'], 'count': 9}]",2024-02-21 15:41:36,2024-02-21 15:41:36.556,[],/posts/akhaliq/707975049025193,61,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1644340617257-noauth.png,641.0,Clémentine Fourrier,clefourrier,800054519615389,"[{'type': 'text', 'value': 'New base pretrained models on the Open LLM Leaderboard!', 'raw': 'New base pretrained models on the Open LLM Leaderboard!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Two new OSS models by Google, who's getting back in the game 😎"", 'raw': ""Two new OSS models by Google, who's getting back in the game 😎""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The 7B is 2nd on the leaderboard, and better than Mistral (notably on GSM8K, aka math). 
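As a rough illustration of the two-stage recipe the Neural Network Diffusion abstract above describes (an autoencoder over parameter vectors, then latent diffusion), here is a deliberately tiny sketch. The linear autoencoder, MLP denoiser, tensor sizes, and noise schedule are all illustrative assumptions, not the paper's architecture.

```python
# Toy sketch of the p-diff recipe: (1) an autoencoder over flattened
# parameter vectors, (2) a DDPM-style noise-prediction loss on its latents.
# All sizes and modules here are illustrative stand-ins.
import torch
import torch.nn as nn

P, Z, T = 2048, 128, 1000                  # param dim, latent dim, diffusion steps
enc, dec = nn.Linear(P, Z), nn.Linear(Z, P)
denoiser = nn.Sequential(nn.Linear(Z + 1, 256), nn.SiLU(), nn.Linear(256, Z))

params = torch.randn(64, P)                # stand-in for trained parameter subsets

# Stage 1: reconstruct parameter vectors through the latent bottleneck
ae_loss = ((dec(enc(params)) - params) ** 2).mean()

# Stage 2: train the denoiser to predict the noise added to (frozen) latents
z0 = enc(params).detach()
t = torch.randint(0, T, (z0.shape[0],))
alpha_bar = torch.cumprod(1 - torch.linspace(1e-4, 0.02, T), dim=0)[t].unsqueeze(1)
eps = torch.randn_like(z0)
zt = alpha_bar.sqrt() * z0 + (1 - alpha_bar).sqrt() * eps
pred = denoiser(torch.cat([zt, t.unsqueeze(1).float() / T], dim=1))
diff_loss = ((pred - eps) ** 2).mean()
```

At sampling time one would run the reverse diffusion over latents and push the result through the decoder to obtain new parameter subsets.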
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'google/gemma-7b'}, 'url': 'https://huggingface.co/google/gemma-7b', 'raw': 'https://huggingface.co/google/gemma-7b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'google/gemma-2b'}, 'url': 'https://huggingface.co/google/gemma-2b', 'raw': 'https://huggingface.co/google/gemma-2b'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check more results on the leaderboard ', 'raw': 'Check more results on the leaderboard '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard', 'raw': 'https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard'}]","New base pretrained models on the Open LLM Leaderboard! + +Two new OSS models by Google, who's getting back in the game 😎 +The 7B is 2nd on the leaderboard, and better than Mistral (notably on GSM8K, aka math). + +https://huggingface.co/google/gemma-7b +https://huggingface.co/google/gemma-2b + +Check more results on the leaderboard https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6202a599216215a22221dea9/7cq05rNgm_dM98euk-yOu.png'}]",[],"[{'reaction': '❤️', 'users': ['tomaarsen', 'Citaman', 'Sylvestre', 'beomi', 'eramax', 'manvinder', 'macaulj1', 'osanseviero', 'lazarustda', 'Alexandro14', 'kramp', 'Rayabarapu', 'clem', 'trisfromgoogle', 'VictorSanh', 'vladbogo', 'mrfakename', 'Kukedlc', 'johndpope', 'alielfilali01', 'BrigitteTousi', 'Rybens'], 'count': 22}]",2024-02-21 13:47:11,2024-02-21 13:47:30.785,[],/posts/clefourrier/800054519615389,82,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1638132956881-5fca176d1d7a08cb34d79d5d.jpeg,250.0,Sourab Mangrulkar,smangrul,440357386436742,"[{'type': 'text', 'value': 'Exciting news for Indic LLMs! 🚀', 'raw': 'Exciting news for Indic LLMs! 🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sarvam AI just released a high-quality, curated dataset with multi-turn conversations in English, Hindi, and Hinglish! 💎 With a whopping 100K samples! 🤯 ', 'raw': 'Sarvam AI just released a high-quality, curated dataset with multi-turn conversations in English, Hindi, and Hinglish! 💎 With a whopping 100K samples! 🤯 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check it out: ', 'raw': 'Check it out: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'sarvamai/samvaad-hi-v1'}, 'url': 'https://huggingface.co/datasets/sarvamai/samvaad-hi-v1', 'raw': 'https://huggingface.co/datasets/sarvamai/samvaad-hi-v1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Who's going to finetune high-quality SFT models on this dataset? ✨"", 'raw': ""Who's going to finetune high-quality SFT models on this dataset? 
✨""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'if you are interested in pushing the boundaries with respect to Indic LLMs, join the discord channel: ', 'raw': 'if you are interested in pushing the boundaries with respect to Indic LLMs, join the discord channel: '}, {'type': 'link', 'href': 'https://discord.gg/hugging-face-879548962464493619', 'raw': 'https://discord.gg/hugging-face-879548962464493619'}]","Exciting news for Indic LLMs! 🚀 + +Sarvam AI just released high-quality, curated dataset with multi-turn conversations in English, Hindi, and Hinglish! 💎 With a whopping 100K samples! 🤯 +Check it out: https://huggingface.co/datasets/sarvamai/samvaad-hi-v1 + +Who's going to finetune high-quality SFT models on this dataset? ✨ +if you are interested in pushing the boundaries with respect to Indic LLMs, join the discord channel: https://discord.gg/hugging-face-879548962464493619","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5fca176d1d7a08cb34d79d5d/GJNSt4ZVDPkMWdrspj0Oe.png'}]",[],"[{'reaction': '❤️', 'users': ['osanseviero', 'rajveer43', 'Rayabarapu', 'clem', 'masterfury', 'alielfilali01'], 'count': 6}, {'reaction': '👍', 'users': ['Alexandro14', 'rajveer43'], 'count': 2}]",2024-02-21 12:50:31,2024-02-21 12:50:31.702,[],/posts/smangrul/440357386436742,188,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,757862983901956,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Backward Lens: Projecting Language Model Gradients into the Vocabulary Space by "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Backward Lens: Projecting Language Model Gradients into the Vocabulary Space by ""}, {'type': 'mention', 'user': 'shaharkatz', 'raw': '@shaharkatz'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'belinkov', 'raw': '@belinkov'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'mega', 'raw': '@mega'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'liorwolf', 'raw': '@liorwolf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Recent interpretability works explore intermediate model representations by projecting them to vocabulary space. This work explores projecting gradients computed from the backward pass to vocabulary space to explain how a single forward-backward pass edits LM knowledge.', 'raw': 'Recent interpretability works explore intermediate model representations by projecting them to vocabulary space. This work explores projecting gradients computed from the backward pass to vocabulary space to explain how a single forward-backward pass edits LM knowledge.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Authors identify a mechanism they dub “imprint and shift” in the forward module in transformer layer. Specifically, the “imprint” refers to the first layer, to or from which the learning process adds or subtracts copies of the intermediate inputs encountered during the forward pass. The “shift” refers to the second matrix, where the weights are shifted by the embedding of the target token.', 'raw': 'Authors identify a mechanism they dub “imprint and shift” in the forward module in transformer layer. 
Specifically, the “imprint” refers to the first layer, to or from which the learning process adds or subtracts copies of the intermediate inputs encountered during the forward pass. The “shift” refers to the second matrix, where the weights are shifted by the embedding of the target token.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Authors note that the dominant components in constructing gradients are derived from the outer product of the last token’s input and the Vector-Jacobian Product, and that the latter contains the embedding of the target token.', 'raw': 'Authors note that the dominant components in constructing gradients are derived from the outer product of the last token’s input and the Vector-Jacobian Product, and that the latter contains the embedding of the target token.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In light of this, a new editing approach named “forward pass shifting” is proposed to update the shifting component of a layer’s feedforward module without backpropagation, using only layer inputs and target token embeddings. The method performs on par with significantly more expensive editing approaches like ROME for single-fact editing, but is less robust to paraphrasing.', 'raw': 'In light of this, a new editing approach named “forward pass shifting” is proposed to update the shifting component of a layer’s feedforward module without backpropagation, using only layer inputs and target token embeddings. The method performs on par with significantly more expensive editing approaches like ROME for single-fact editing, but is less robust to paraphrasing.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Authors note that these results provide promising evidence on the possibility of finding shortcuts to fine-tuning by directly injecting knowledge in model layers.', 'raw': 'Authors note that these results provide promising evidence on the possibility of finding shortcuts to fine-tuning by directly injecting knowledge in model layers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.12865'}, 'url': 'https://huggingface.co/papers/2402.12865', 'raw': 'https://huggingface.co/papers/2402.12865', 'label': 'Backward Lens: Projecting Language Model Gradients into the Vocabulary\n Space (2402.12865)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 All daily picks in LM interpretability: ', 'raw': '🔍 All daily picks in LM interpretability: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9', 'raw': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Backward Lens: Projecting Language Model Gradients into the Vocabulary Space by @shaharkatz @belinkov @mega @liorwolf + +Recent interpretability works explore intermediate model representations by projecting them to vocabulary space. This work explores projecting gradients computed from the backward pass to vocabulary space to explain how a single forward-backward pass edits LM knowledge. 
+ +Authors identify a mechanism they dub “imprint and shift” in the forward module of a transformer layer. Specifically, the “imprint” refers to the first layer, to or from which the learning process adds or subtracts copies of the intermediate inputs encountered during the forward pass. The “shift” refers to the second matrix, where the weights are shifted by the embedding of the target token. + +Authors note that the dominant components in constructing gradients are derived from the outer product of the last token’s input and the Vector-Jacobian Product, and that the latter contains the embedding of the target token. + +In light of this, a new editing approach named “forward pass shifting” is proposed to update the shifting component of a layer’s feedforward module without backpropagation, using only layer inputs and target token embeddings. The method performs on par with significantly more expensive editing approaches like ROME for single-fact editing, but is less robust to paraphrasing. + +Authors note that these results provide promising evidence on the possibility of finding shortcuts to fine-tuning by directly injecting knowledge in model layers. + +📄 Paper: https://huggingface.co/papers/2402.12865 + +🔍 All daily picks in LM interpretability: https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/X5TXloApOrd-c6OCy2vIu.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/GEb5AJU-g31rWwQ2EorC4.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/mAyWwzoVDTwA8ZRc9EQg8.png'}]","[{'_id': '614c57f1ee44bcfe57b366d6', 'avatarUrl': '/avatars/186a9aed84681246f48ed2a012c50def.svg', 'fullname': 'Yonatan Belinkov', 'name': 'belinkov', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}, {'_id': '62f9f170363251ee40a08046', 'avatarUrl': '/avatars/67ad459db44b605be0c1f6bd33363d6e.svg', 'fullname': 'Lior Wolf', 'name': 'liorwolf', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}, {'_id': '610b729f9da682cd54ad9adf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1628140189042-noauth.jpeg', 'fullname': 'Mor Geva', 'name': 'mega', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}, {'_id': '641051dcf52d7eb22e050f98', 'avatarUrl': '/avatars/dcc2e51552b4443c0352a6632d8a1001.svg', 'fullname': 'Shahar Katz', 'name': 'shaharkatz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1}]","[{'reaction': '🤝', 'users': ['Alexandro14', 'clem', 'vladbogo', 'shachar373', 'shamikbose89', 'ericsabbath'], 'count': 6}]",2024-02-21 10:55:29,2024-02-21 10:55:29.428,[],/posts/gsarti/757862983901956,68,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,560892585453068,"[{'type': 'text', 'value': '🚀🔥🌟 New Research Alert - ICLR 2024! 
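The outer-product claim in the Backward Lens summary above is easy to check numerically for a single linear layer. In this toy verification the shapes and the random stand-in for the vector-Jacobian product are arbitrary illustrative choices:

```python
# For y = x @ W, the gradient of W under an upstream gradient g (the VJP)
# is exactly the rank-1 outer product x^T g.
import torch

x = torch.randn(1, 4)                      # the layer input (e.g. the last token's hidden state)
W = torch.randn(4, 3, requires_grad=True)
y = x @ W

g = torch.randn(1, 3)                      # stand-in VJP; per the paper it carries the target embedding
y.backward(g)

assert torch.allclose(W.grad, x.t() @ g)   # the weight gradient is the outer product
```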
🌟🔥🚀'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Title: FasterViT: Fast Vision Transformers with Hierarchical Attention', 'raw': '📄 Title: FasterViT: Fast Vision Transformers with Hierarchical Attention'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👥 Authors: ', 'raw': '👥 Authors: '}, {'type': 'mention', 'user': 'ahatamiz', 'raw': '@ahatamiz'}, {'type': 'text', 'value': ', ', 'raw': ', '}, {'type': 'mention', 'user': 'slivorezzz', 'raw': '@slivorezzz'}, {'type': 'text', 'value': ' et al.', 'raw': ' et al.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📅 Conference: ICLR, May 7-11, 2024 | Vienna, Austria 🇦🇹', 'raw': '📅 Conference: ICLR, May 7-11, 2024 | Vienna, Austria 🇦🇹'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Paper: ', 'raw': '🔗 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2306.06189'}, 'url': 'https://huggingface.co/papers/2306.06189', 'raw': 'https://huggingface.co/papers/2306.06189', 'label': 'FasterViT: Fast Vision Transformers with Hierarchical Attention (2306.06189)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Model 🤖 : ', 'raw': '🔗 Model 🤖 : '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nvidia/FasterViT'}, 'url': 'https://huggingface.co/nvidia/FasterViT', 'raw': 'https://huggingface.co/nvidia/FasterViT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔗 Repo: ', 'raw': '🔗 Repo: '}, {'type': 'link', 'href': 'https://github.com/NVlabs/FasterViT', 'raw': 'https://github.com/NVlabs/FasterViT'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 More Papers: more cutting-edge research presented at other conferences in the ', 'raw': '📚 More Papers: more cutting-edge research presented at other conferences in the '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'DmitryRyumin/NewEraAI-Papers'}, 'url': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers', 'raw': 'https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers'}, {'type': 'text', 'value': ' curated by ', 'raw': ' curated by '}, {'type': 'mention', 'user': 'DmitryRyumin', 'raw': '@DmitryRyumin'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Keywords: #VisionTransformers #DeepLearning #ComputerVision #ICLR2024 #MachineLearning #HierarchicalAttention #NeuralNetworks #Research #ArtificialIntelligence #Innovation', 'raw': '🔍 Keywords: #VisionTransformers #DeepLearning #ComputerVision #ICLR2024 #MachineLearning #HierarchicalAttention #NeuralNetworks #Research #ArtificialIntelligence #Innovation'}]","🚀🔥🌟 New Research Alert - ICLR 2024! 🌟🔥🚀 +📄 Title: FasterViT: Fast Vision Transformers with Hierarchical Attention + +👥 Authors: @ahatamiz, @slivorezzz et al. 
+ +📅 Conference: ICLR, May 7-11, 2024 | Vienna, Austria 🇦🇹 + +🔗 Paper: https://huggingface.co/papers/2306.06189 + +🔗 Model 🤖 : https://huggingface.co/nvidia/FasterViT +🔗 Repo: https://github.com/NVlabs/FasterViT + +📚 More Papers: more cutting-edge research presented at other conferences in the https://huggingface.co/spaces/DmitryRyumin/NewEraAI-Papers curated by @DmitryRyumin + +🔍 Keywords: #VisionTransformers #DeepLearning #ComputerVision #ICLR2024 #MachineLearning #HierarchicalAttention #NeuralNetworks #Research #ArtificialIntelligence #Innovation","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/EJEgQ615Wbw4FAH1X_97M.jpeg'}]","[{'_id': '64414b62603214724ebd2636', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64414b62603214724ebd2636/x9JVcJRZKZE7hdEII1JRR.jpeg', 'fullname': 'Ali', 'name': 'ahatamiz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}, {'_id': '6493306970d925ae80523a53', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg', 'fullname': 'Dmitry Ryumin', 'name': 'DmitryRyumin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 398}, {'_id': '646d0c1c534e52f8c30500a6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646d0c1c534e52f8c30500a6/75VH8ClbRaP75BU2ONfXE.png', 'fullname': 'Pavlo Molchanov', 'name': 'pmolchanov', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 15}]","[{'reaction': '❤️', 'users': ['osanseviero', 'merve', 'Alexandro14', 'clem', 'ahatamiz', 'chansung', 'Falah', 'dillfrescott'], 'count': 8}]",2024-02-21 10:32:59,2024-03-07 22:36:04.139,[],/posts/DmitryRyumin/560892585453068,19,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/657217faabb25ed8aedd5e48/UUHAXeGtOnQBXFD3nYtf2.jpeg,117.0,Vlad Bogolin,vladbogo,668104700132150,"[{'type': 'text', 'value': 'REALIGN is a new method designed to improve the alignment of Large Language Models (LLMs) with human values by reformatting instruction data. This approach enhances LLM performance across various metrics by aligning responses with predefined criteria and evidence.', 'raw': 'REALIGN is a new method designed to improve the alignment of Large Language Models (LLMs) with human values by reformatting instruction data. 
This approach enhances LLM performance across various metrics by aligning responses with predefined criteria and evidence.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Key points:', 'raw': 'Key points:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* REALIGN has three steps: criteria definition, retrieval augmentation, and response reformatting', 'raw': '* REALIGN has three steps: criteria definition, retrieval augmentation, and response reformatting'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* It rewrites pairs (query, response) to enhance data quality for fine-tuning LLMs.', 'raw': '* It rewrites pairs (query, response) to enhance data quality for fine-tuning LLMs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '* It has shown significant improvements in general alignment, math reasoning and other tasks.', 'raw': '* It has shown significant improvements in general alignment, math reasoning and other tasks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Congrats to the authors for their work!', 'raw': 'Congrats to the authors for their work!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.12219'}, 'url': 'https://huggingface.co/papers/2402.12219', 'raw': 'https://huggingface.co/papers/2402.12219', 'label': 'Reformatted Alignment (2402.12219)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Code: ', 'raw': 'Code: '}, {'type': 'link', 'href': 'https://github.com/GAIR-NLP/ReAlign', 'raw': 'https://github.com/GAIR-NLP/ReAlign'}]","REALIGN is a new method designed to improve the alignment of Large Language Models (LLMs) with human values by reformatting instruction data. This approach enhances LLM performance across various metrics by aligning responses with predefined criteria and evidence. + +Key points: + +* REALIGN has three steps: criteria definition, retrieval augmentation, and response reformatting +* It rewrites pairs (query, response) to enhance data quality for fine-tuning LLMs. +* It has shown significant improvements in general alignment, math reasoning and other tasks. + +Congrats to the authors for their work! 
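A hypothetical sketch of how those three REALIGN steps might compose; `llm`, `retrieve`, and the criteria string are invented stand-ins for illustration, not the authors' code:

```python
# Hypothetical composition of the three steps described in the post.
# llm, retrieve, and the criteria text are illustrative stand-ins.
def realign_pair(query: str, response: str, llm, retrieve) -> str:
    # 1. Criteria definition: a hand-written format spec for this task type.
    criteria = "Give a direct answer first, then list supporting evidence as bullets."
    # 2. Retrieval augmentation: gather evidence for knowledge-intensive queries.
    evidence = retrieve(query)
    # 3. Response reformatting: rewrite the response to match the criteria,
    #    grounded in the retrieved evidence.
    prompt = (
        "Rewrite the response so it satisfies the criteria, using the evidence.\n"
        f"Criteria: {criteria}\nEvidence: {evidence}\n"
        f"Query: {query}\nOriginal response: {response}\nRewritten response:"
    )
    return llm(prompt)
```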
+ +Paper: https://huggingface.co/papers/2402.12219 +Code: https://github.com/GAIR-NLP/ReAlign","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/XPvJpqOQTCxtURRKobtaT.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/HU1aULvncESaizYjJMZEW.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/yc0rk_pJotKoFNrh08cGE.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/657217faabb25ed8aedd5e48/LhNzREXJgw5lz4SsirZ7K.png'}]",[],"[{'reaction': '❤️', 'users': ['NovoCode', 'Taf2023', 'samusenps', 'Alexandro14', 'clem', 'Madisen', 'prem38719', 'xargs01', 'cyatom01', 'HR1777'], 'count': 10}, {'reaction': '🤯', 'users': ['NovoCode', 'osanseviero', 'clem', 'Madisen'], 'count': 4}, {'reaction': '👍', 'users': ['HR1777'], 'count': 1}]",2024-02-20 22:55:21,2024-02-24 11:22:42.263,"[{'_id': '659c2efde72a86b5463eb6d8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/GRfT2yDCFfnDt84gG51wj.png', 'fullname': 'Novo', 'name': 'NovoCode', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 18, 'isFollowing': False}, {'_id': '657217faabb25ed8aedd5e48', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/657217faabb25ed8aedd5e48/UUHAXeGtOnQBXFD3nYtf2.jpeg', 'fullname': 'Vlad Bogolin', 'name': 'vladbogo', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 117, 'isFollowing': False}, {'_id': '620eee53e3e81143d4fb891d', 'avatarUrl': '/avatars/5f90996e24713da8972e380cface6ecf.svg', 'fullname': 'Madisen Taylor', 'name': 'Madisen', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 27, 'isFollowing': False}, {'_id': '65c76f9466373c54d2ab772b', 'avatarUrl': '/avatars/b7ae2cc884709be678e98a2c69ebc9c2.svg', 'fullname': 'prem swami', 'name': 'prem38719', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/vladbogo/668104700132150,8,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/648a824a8ca6cf9857d1349c/wAdFg_x9Km-_Jw2ccD6DV.jpeg,3423.0,Tony Assi,tonyassi,526336600388381,"[{'type': 'text', 'value': 'MANIFESTO', 'raw': 'MANIFESTO'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After working in fashion e-commerce for years I\'ve come to the conclusion that in e-commerce we do not sell clothes... we sell images of clothes. Compressed, digital versions of physical products. As Roland Barthes pointed out in The Fashion System, a product image is a symbol or metaphor of a product. Media--in this case images--mediates the space between customer and product; viewer and object. Images can be altered, changed, corrupted, photoshopped, edited, deleted, or imagined. E-commerce products (or e-commerce photos) can be thought of as a possibility space of digital pixels. AI/ML can analyze, manipulate, and create within this ""possibility space of pixels""--thus it can be observed that there are opportunities to intervene in the physical fashion world through the imagination of artificial intelligence. Not to replace human creativity--but to augment it. To make it ART-ificial. Art is an artificial representation of reality. AI images are an artificial representation of reality. 
The sewing machine greatly increased the efficiency of clothing production. Similarly, AI has greatly increased the efficiency of image production, in our case product photo production. The fashion design paradigm of the past century (design->produce->photograph) has been flipped on its head. Instead of going from physical clothing to digital image via photography--we can go from digital image to physical clothing via stable diffusion. We are writing the chapter of Understanding Media that Marshall McLuhan never imagined. Virtual production hasn\'t replaced the physical production; it has simply made it out of style. ', 'raw': 'After working in fashion e-commerce for years I\'ve come to the conclusion that in e-commerce we do not sell clothes... we sell images of clothes. Compressed, digital versions of physical products. As Roland Barthes pointed out in The Fashion System, a product image is a symbol or metaphor of a product. Media--in this case images--mediates the space between customer and product; viewer and object. Images can be altered, changed, corrupted, photoshopped, edited, deleted, or imagined. E-commerce products (or e-commerce photos) can be thought of as a possibility space of digital pixels. AI/ML can analyze, manipulate, and create within this ""possibility space of pixels""--thus it can be observed that there are opportunities to intervene in the physical fashion world through the imagination of artificial intelligence. Not to replace human creativity--but to augment it. To make it ART-ificial. Art is an artificial representation of reality. AI images are an artificial representation of reality. The sewing machine greatly increased the efficiency of clothing production. Similarly, AI has greatly increased the efficiency of image production, in our case product photo production. The fashion design paradigm of the past century (design->produce->photograph) has been flipped on its head. Instead of going from physical clothing to digital image via photography--we can go from digital image to physical clothing via stable diffusion. We are writing the chapter of Understanding Media that Marshall McLuhan never imagined. Virtual production hasn\'t replaced the physical production; it has simply made it out of style. '}]","MANIFESTO +After working in fashion e-commerce for years I've come to the conclusion that in e-commerce we do not sell clothes... we sell images of clothes. Compressed, digital versions of physical products. As Roland Barthes pointed out in The Fashion System, a product image is a symbol or metaphor of a product. Media--in this case images--mediates the space between customer and product; viewer and object. Images can be altered, changed, corrupted, photoshopped, edited, deleted, or imagined. E-commerce products (or e-commerce photos) can be thought of as a possibility space of digital pixels. AI/ML can analyze, manipulate, and create within this ""possibility space of pixels""--thus it can be observed that there are opportunities to intervene in the physical fashion world through the imagination of artificial intelligence. Not to replace human creativity--but to augment it. To make it ART-ificial. Art is an artificial representation of reality. AI images are an artificial representation of reality. The sewing machine greatly increased the efficiency of clothing production. Similarly, AI has greatly increased the efficiency of image production, in our case product photo production. 
The fashion design paradigm of the past century (design->produce->photograph) has been flipped on its head. Instead of going from physical clothing to digital image via photography--we can go from digital image to physical clothing via stable diffusion. We are writing the chapter of Understanding Media that Marshall McLuhan never imagined. Virtual production hasn't replaced the physical production; it has simply made it out of style. ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/648a824a8ca6cf9857d1349c/89efI_xvyVlcULTKVturQ.png'}]",[],"[{'reaction': '👍', 'users': ['fffiloni', 'TuringsSolutions', 'vladbogo', 'dcsdcsdasdasa', 'osanseviero', 'Taf2023', 'aidystark', 'kramp', 'samusenps', 'angelium111', 'Madisen', 'Wauplin', 'lunarflu', 'pascalnjue', 'Chieh-Yun', 'sghosh2', 'DocRace', 'ozgung', 'Krishana671', 'Jia-ao', 'Nymbo', 'strangebose2', 'marinarosa', 'MustardWhale516', 'HelloBug', 'msaaksjarvi', 'shl0th'], 'count': 27}, {'reaction': '❤️', 'users': ['Madisen', 'anthonymikinka', 'lunarflu', 'sghosh2', 'Douglas7rd', 'liyimichael', 'ozgung', 'strangebose2', 'Nymbo', 'marinarosa', 'MustardWhale516'], 'count': 11}, {'reaction': '🔥', 'users': ['varadpuntambekar', 'strangebose2', 'Nymbo', 'marinarosa'], 'count': 4}, {'reaction': '🤝', 'users': ['sghosh2', 'strangebose2', 'Nymbo'], 'count': 3}]",2024-02-20 18:03:26,2024-11-05 22:59:49.959,"[{'_id': '67069018fe9772f4a1424cf8', 'avatarUrl': '/avatars/99beab6235ecb70ceb1eae22e1f71257.svg', 'fullname': 'Максим Николаевич Подъяпольский', 'name': 'maks10263', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '6673930587330d737814ebaf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/ywTmT1E5q47vddhDrAEVy.png', 'fullname': 'maria astrah', 'name': 'dzidsi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '66c502e4faf1c11450b2e4b2', 'avatarUrl': '/avatars/d2b3b21c515d96126d29f5c882ba7b41.svg', 'fullname': 'ddd', 'name': 'muterfuka', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/tonyassi/526336600388381,17465,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,787276815476646,"[{'type': 'text', 'value': 'Fantastic Beasts (*Hallucinations*) and Where to Find Them 🔎\U0001f9cc', 'raw': 'Fantastic Beasts (*Hallucinations*) and Where to Find Them 🔎\U0001f9cc'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This paper breaks down LLM hallucinations into six different types:', 'raw': 'This paper breaks down LLM hallucinations into six different types:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1️⃣ Entity: Involves errors in nouns. Changing that single entity can make the sentence correct.', 'raw': '1️⃣ Entity: Involves errors in nouns. Changing that single entity can make the sentence correct.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2️⃣ Relation: Involves errors in verbs, prepositions, or adjectives. 
They can be fixed by correcting the relation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3️⃣ Contradictory: Sentences that contradict factually correct information.', 'raw': '3️⃣ Contradictory: Sentences that contradict factually correct information.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""4️⃣ Invented: When the LLM generates sentences with concepts that don't exist in the real world."", 'raw': ""4️⃣ Invented: When the LLM generates sentences with concepts that don't exist in the real world.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '5️⃣ Subjective: When the LLM generates sentences influenced by personal beliefs, feelings, biases, etc.', 'raw': '5️⃣ Subjective: When the LLM generates sentences influenced by personal beliefs, feelings, biases, etc.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""6️⃣ Unverifiable: When the LLM comes up with sentences containing information that can't be verified. E.g., Personal or private matters."", 'raw': ""6️⃣ Unverifiable: When the LLM comes up with sentences containing information that can't be verified. E.g., Personal or private matters.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The first two types of hallucinations are relatively easy to correct, given that we can rewrite them by changing the entity or relation. However, the other four would mostly need to be removed to make the sentence factually correct.', 'raw': 'The first two types of hallucinations are relatively easy to correct, given that we can rewrite them by changing the entity or relation. However, the other four would mostly need to be removed to make the sentence factually correct.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.06855'}, 'url': 'https://huggingface.co/papers/2401.06855', 'raw': 'https://huggingface.co/papers/2401.06855', 'label': 'Fine-grained Hallucination Detection and Editing for Language Models (2401.06855)'}]","Fantastic Beasts (*Hallucinations*) and Where to Find Them 🔎🧌 + +This paper breaks down LLM hallucinations into six different types: + +1️⃣ Entity: Involves errors in nouns. Changing that single entity can make the sentence correct. + +2️⃣ Relation: Involves errors in verbs, prepositions, or adjectives. They can be fixed by correcting the relation. + +3️⃣ Contradictory: Sentences that contradict factually correct information. + +4️⃣ Invented: When the LLM generates sentences with concepts that don't exist in the real world. + +5️⃣ Subjective: When the LLM generates sentences influenced by personal beliefs, feelings, biases, etc. + +6️⃣ Unverifiable: When the LLM comes up with sentences containing information that can't be verified. E.g., Personal or private matters. + +The first two types of hallucinations are relatively easy to correct, given that we can rewrite them by changing the entity or relation. However, the other four would mostly need to be removed to make the sentence factually correct. 
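The fix-versus-remove distinction in the hallucination post above can be made concrete with a small taxonomy type; the six names follow the post, while the `editable` flag is just one illustrative encoding:

```python
# Illustrative encoding of the six hallucination types; editable=True marks
# the two kinds (entity, relation) that can be fixed by rewriting in place,
# while the other four would need to be removed.
from enum import Enum

class HallucinationType(Enum):
    ENTITY = ("wrong noun", True)
    RELATION = ("wrong verb/preposition/adjective", True)
    CONTRADICTORY = ("contradicts known facts", False)
    INVENTED = ("concept that does not exist", False)
    SUBJECTIVE = ("belief- or bias-driven claim", False)
    UNVERIFIABLE = ("cannot be checked", False)

    def __init__(self, description: str, editable: bool):
        self.description = description
        self.editable = editable

print([t.name for t in HallucinationType if t.editable])  # ['ENTITY', 'RELATION']
```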
+ +Paper: https://huggingface.co/papers/2401.06855","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/UbisvfnFiD1CGJMXDsEIh.png'}]",[],"[{'reaction': '🤗', 'users': ['Kukedlc', 'vladbogo', 'bisnotforbella', 'osanseviero', 'jeffboudier', 'andrewrreed', 'EpsilonPhoenix7', 'loudog2323', 'Hanyu66'], 'count': 9}, {'reaction': '🤯', 'users': ['bisnotforbella', 'Hanyu66'], 'count': 2}]",2024-02-20 16:56:53,2024-02-20 16:57:17.395,[],/posts/santiviquez/787276815476646,30,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/607997c83a565c15675055b3/KCVb16r2WHSqyRbUyp4eK.jpeg,38.0,Zach Nussbaum,zpn,268678477536555,"[{'type': 'text', 'value': 'ICYMI! Nomic Embed v1.5: Resizable Production Embeddings with Matryoshka Representation Learning', 'raw': 'ICYMI! Nomic Embed v1.5: Resizable Production Embeddings with Matryoshka Representation Learning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Variable embedding dimension from 64 <-> 768 ', 'raw': '- Variable embedding dimension from 64 <-> 768 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Outperforms text-embedding-ada-002 while achieving a 3x memory reduction', 'raw': '- Outperforms text-embedding-ada-002 while achieving a 3x memory reduction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Day 1 integrations with Langchain, LlamaIndex, MongoDB, and Sentence Transformers', 'raw': '- Day 1 integrations with Langchain, LlamaIndex, MongoDB, and Sentence Transformers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out ', 'raw': 'Check out '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nomic-ai/nomic-embed-text-v1.5'}, 'url': 'https://huggingface.co/nomic-ai/nomic-embed-text-v1.5', 'raw': 'https://huggingface.co/nomic-ai/nomic-embed-text-v1.5'}, {'type': 'text', 'value': ' for the model weights.', 'raw': ' for the model weights.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Technical report: ', 'raw': 'Technical report: '}, {'type': 'link', 'href': 'https://static.nomic.ai/reports/2024_Nomic_Embed_Text_Technical_Report.pdf', 'raw': 'https://static.nomic.ai/reports/2024_Nomic_Embed_Text_Technical_Report.pdf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blog Post: ', 'raw': 'Blog Post: '}, {'type': 'link', 'href': 'https://blog.nomic.ai/posts/nomic-embed-matryoshka', 'raw': 'https://blog.nomic.ai/posts/nomic-embed-matryoshka'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Original Tweet Thread: ', 'raw': 'Original Tweet Thread: '}, {'type': 'link', 'href': 'https://x.com/nomic_ai/status/1757782157374734665?s=20', 'raw': 'https://x.com/nomic_ai/status/1757782157374734665?s=20'}]","ICYMI! Nomic Embed v1.5: Resizable Production Embeddings with Matryoshka Representation Learning + +- Variable embedding dimension from 64 <-> 768 +- Outperforms text-embedding-ada-002 while achieving a 3x memory reduction +- Day 1 integrations with Langchain, LlamaIndex, MongoDB, and Sentence Transformers + +Check out +https://huggingface.co/nomic-ai/nomic-embed-text-v1.5 for the model weights. 
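A minimal sketch of the resizable-embedding idea with the Nomic model above: the `search_query:` prefix follows Nomic's documented usage, the 256-dim choice is arbitrary, and the layer-norm / truncate / renormalize steps are the usual Matryoshka post-processing rather than code taken from the post.

```python
# Sketch: shrinking nomic-embed-text-v1.5 embeddings from 768 to 256 dims.
# Prefix and target dim are illustrative; any size from 64 to 768 works.
import torch.nn.functional as F
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("nomic-ai/nomic-embed-text-v1.5", trust_remote_code=True)

full = model.encode(["search_query: what are matryoshka embeddings?"],
                    convert_to_tensor=True)                 # shape (1, 768)

dim = 256
emb = F.layer_norm(full, normalized_shape=(full.shape[1],))
emb = F.normalize(emb[:, :dim], p=2, dim=1)                 # truncate, then re-normalize
print(emb.shape)                                            # torch.Size([1, 256])
```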
+ +Technical report: https://static.nomic.ai/reports/2024_Nomic_Embed_Text_Technical_Report.pdf +Blog Post: https://blog.nomic.ai/posts/nomic-embed-matryoshka +Original Tweet Thread: https://x.com/nomic_ai/status/1757782157374734665?s=20","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/607997c83a565c15675055b3/S8ClSxaHRbPOM1hX528xL.webp'}]",[],"[{'reaction': '❤️', 'users': ['philschmid', 'Xenova', 'osanseviero', 'Taf2023', 'davanstrien', 'sayhan', 'linhphanff', 'Yhyu13'], 'count': 8}, {'reaction': '🤯', 'users': ['philschmid', 'Xenova', 'davanstrien'], 'count': 3}, {'reaction': '👍', 'users': ['linhphanff'], 'count': 1}]",2024-02-20 16:54:12,2024-02-22 13:14:35.427,[],/posts/zpn/268678477536555,409,,200 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,231717871154737,"[{'type': 'text', 'value': 'OS-Copilot', 'raw': 'OS-Copilot'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Towards Generalist Computer Agents with Self-Improvement', 'raw': 'Towards Generalist Computer Agents with Self-Improvement'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.07456'}, 'url': 'https://huggingface.co/papers/2402.07456', 'raw': 'https://huggingface.co/papers/2402.07456', 'label': 'OS-Copilot: Towards Generalist Computer Agents with Self-Improvement (2402.07456)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Autonomous interaction with the computer has been a longstanding challenge with great potential, and the recent proliferation of large language models (LLMs) has markedly accelerated progress in building digital agents. However, most of these agents are designed to interact with a narrow domain, such as a specific software or website. This narrow focus constrains their applicability for general computer tasks. To this end, we introduce OS-Copilot, a framework to build generalist agents capable of interfacing with comprehensive elements in an operating system (OS), including the web, code terminals, files, multimedia, and various third-party applications. We use OS-Copilot to create FRIDAY, a self-improving embodied agent for automating general computer tasks. On GAIA, a general AI assistants benchmark, FRIDAY outperforms previous methods by 35%, showcasing strong generalization to unseen applications via accumulated skills from previous tasks. We also present numerical and quantitative evidence that FRIDAY learns to control and self-improve on Excel and Powerpoint with minimal supervision. Our OS-Copilot framework and empirical findings provide infrastructure and insights for future research toward more capable and general-purpose computer agents.', 'raw': 'Autonomous interaction with the computer has been a longstanding challenge with great potential, and the recent proliferation of large language models (LLMs) has markedly accelerated progress in building digital agents. However, most of these agents are designed to interact with a narrow domain, such as a specific software or website. This narrow focus constrains their applicability for general computer tasks. 
To this end, we introduce OS-Copilot, a framework to build generalist agents capable of interfacing with comprehensive elements in an operating system (OS), including the web, code terminals, files, multimedia, and various third-party applications. We use OS-Copilot to create FRIDAY, a self-improving embodied agent for automating general computer tasks. On GAIA, a general AI assistants benchmark, FRIDAY outperforms previous methods by 35%, showcasing strong generalization to unseen applications via accumulated skills from previous tasks. We also present numerical and quantitative evidence that FRIDAY learns to control and self-improve on Excel and Powerpoint with minimal supervision. Our OS-Copilot framework and empirical findings provide infrastructure and insights for future research toward more capable and general-purpose computer agents.'}]","OS-Copilot + +Towards Generalist Computer Agents with Self-Improvement + +https://huggingface.co/papers/2402.07456 + +Autonomous interaction with the computer has been a longstanding challenge with great potential, and the recent proliferation of large language models (LLMs) has markedly accelerated progress in building digital agents. However, most of these agents are designed to interact with a narrow domain, such as a specific software or website. This narrow focus constrains their applicability for general computer tasks. To this end, we introduce OS-Copilot, a framework to build generalist agents capable of interfacing with comprehensive elements in an operating system (OS), including the web, code terminals, files, multimedia, and various third-party applications. We use OS-Copilot to create FRIDAY, a self-improving embodied agent for automating general computer tasks. On GAIA, a general AI assistants benchmark, FRIDAY outperforms previous methods by 35%, showcasing strong generalization to unseen applications via accumulated skills from previous tasks. We also present numerical and quantitative evidence that FRIDAY learns to control and self-improve on Excel and Powerpoint with minimal supervision. Our OS-Copilot framework and empirical findings provide infrastructure and insights for future research toward more capable and general-purpose computer agents.","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/4s8pyqAiDhdct7tSzHfJG.qt'}]",[],"[{'reaction': '❤️', 'users': ['morgan', 'osanseviero', 'clem', 'samusenps', 'angkul07', 'mindrage', 'truong-xuan-linh', 'impactframes'], 'count': 8}, {'reaction': '👍', 'users': ['vladbogo', 'clem'], 'count': 2}]",2024-02-13 15:11:28,2024-02-19 18:10:00.782,"[{'_id': '64b795b275e4fe20e9fc5aca', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64b795b275e4fe20e9fc5aca/v5AWl4XUNF54_0hyRXqBc.jpeg', 'fullname': 'Ayaan Sharif', 'name': 'Ayaan-Sharif', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7, 'isFollowing': False}]",/posts/akhaliq/231717871154737,53,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/1618571183509-5f05a97d5d08220171a0ad9d.png,19.0,Morgan McGuire,morgan,611510884072195,"[{'type': 'text', 'value': 'Fine-tuning LLMs is rad, but how do you manage all your checkpoints and evals in a production setting? ', 'raw': 'Fine-tuning LLMs is rad, but how do you manage all your checkpoints and evals in a production setting? 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We partnered with ', 'raw': 'We partnered with '}, {'type': 'mention', 'user': 'hamel', 'raw': '@hamel'}, {'type': 'text', 'value': ' to ship an Enterprise Model Management course packed full of learnings for those training, evaluating and deploying models at work.', 'raw': ' to ship an Enterprise Model Management course packed full of learnings for those training, evaluating and deploying models at work.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Topics include:', 'raw': 'Topics include:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- What webhooks are & how to use them to create integrations with different tools', 'raw': '- What webhooks are & how to use them to create integrations with different tools'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- How to automate train -> eval runs', 'raw': '- How to automate train -> eval runs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Improving model governance and documentation', 'raw': '- Improving model governance and documentation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Comparing candidate and baseline models', 'raw': '- Comparing candidate and baseline models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Design patterns & recipes', 'raw': '- Design patterns & recipes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Lots more...', 'raw': '- Lots more...'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Would love to hear what you think!', 'raw': 'Would love to hear what you think!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '👉 ', 'raw': '👉 '}, {'type': 'link', 'href': 'https://www.wandb.courses/courses/enterprise-model-management', 'raw': 'https://www.wandb.courses/courses/enterprise-model-management'}, {'type': 'new_line', 'raw': '\n'}]","Fine-tuning LLMs is rad, but how do you manage all your checkpoints and evals in a production setting? + +We partnered with @hamel to ship an Enterprise Model Management course packed full of learnings for those training, evaluating and deploying models at work. + +Topics include: +- What webhooks are & how to use them to create integrations with different tools +- How to automate train -> eval runs +- Improving model governance and documentation +- Comparing candidate and baseline models +- Design patterns & recipes +- Lots more... + +Would love to hear what you think! 
+ +👉 https://www.wandb.courses/courses/enterprise-model-management +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5f05a97d5d08220171a0ad9d/r7XrnBR0eLV3puFQ-g3I8.gif'}]","[{'_id': '60825556e2b7cc3a117b0d97', 'avatarUrl': '/avatars/dfe53ca21268c56b9648e405e6370c38.svg', 'fullname': 'Hamel Husain', 'name': 'hamel', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 22}]","[{'reaction': '❤️', 'users': ['osanseviero', 'altryne', 'macadeliccc', 'clem', 'samusenps'], 'count': 5}, {'reaction': '🤝', 'users': ['victor'], 'count': 1}]",2024-02-13 15:00:21,2024-02-13 15:00:21.658,[],/posts/morgan/611510884072195,80,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,602803860140202,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Show Me How It's Done: The Role of Explanations in Fine-Tuning Language Models by M. Ballout "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Show Me How It's Done: The Role of Explanations in Fine-Tuning Language Models by M. Ballout ""}, {'type': 'mention', 'user': 'krumnack', 'raw': '@krumnack'}, {'type': 'text', 'value': ' et al.', 'raw': ' et al.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Authors propose a fine-tuning procedure in which a classification task is framed as generation and augmented with a natural language explanation to clarify intermediate reasoning steps. The procedure is applied to fine-tune language models of various sizes on the ListOps dataset, containing synthetically-generated instructions on sequences of numbers.', 'raw': 'Authors propose a fine-tuning procedure in which a classification task is framed as generation and augmented with a natural language explanation to clarify intermediate reasoning steps. The procedure is applied to fine-tune language models of various sizes on the ListOps dataset, containing synthetically-generated instructions on sequences of numbers.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Authors find that explanations contribute to improving model performances across all tested model sizes and explanations lengths. Smaller language models appear to benefit the most from this approach in terms of convergence speed, performance and input length generalisation, especially when given more exhaustive explanations.', 'raw': 'Authors find that explanations contribute to improving model performances across all tested model sizes and explanations lengths. 
Smaller language models appear to benefit the most from this approach in terms of convergence speed, performance and input length generalisation, especially when given more exhaustive explanations.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.07543'}, 'url': 'https://huggingface.co/papers/2402.07543', 'raw': 'https://huggingface.co/papers/2402.07543', 'label': ""Show Me How It's Done: The Role of Explanations in Fine-Tuning Language\n Models (2402.07543)""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻 Code: ', 'raw': '💻 Code: '}, {'type': 'link', 'href': 'https://github.com/BalloutAI/Fine-tuning-with-Explanation', 'raw': 'https://github.com/BalloutAI/Fine-tuning-with-Explanation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 All daily picks in LM interpretability: ', 'raw': '🔍 All daily picks in LM interpretability: '}, {'type': 'link', 'href': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9', 'raw': 'https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Show Me How It's Done: The Role of Explanations in Fine-Tuning Language Models by M. Ballout @krumnack et al. + +Authors propose a fine-tuning procedure in which a classification task is framed as generation and augmented with a natural language explanation to clarify intermediate reasoning steps. The procedure is applied to fine-tune language models of various sizes on the ListOps dataset, containing synthetically-generated instructions on sequences of numbers. + +Authors find that explanations contribute to improving model performances across all tested model sizes and explanations lengths. Smaller language models appear to benefit the most from this approach in terms of convergence speed, performance and input length generalisation, especially when given more exhaustive explanations. 
+ +📄 Paper: https://huggingface.co/papers/2402.07543 + +💻 Code: https://github.com/BalloutAI/Fine-tuning-with-Explanation + +🔍 All daily picks in LM interpretability: https://huggingface.co/collections/gsarti/daily-picks-in-interpretability-and-analysis-of-lms-65ae3339949c5675d25de2f9","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/6km88wqOCfZdLGDSFlmfz.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/ricUKIXgulX-BmXQ0_gdJ.png'}]","[{'_id': '63289a045dc16f1281811583', 'avatarUrl': '/avatars/2f3029b0fa90151ec01920174ff5b682.svg', 'fullname': 'Ulf Krumnack', 'name': 'krumnack', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '❤️', 'users': ['osanseviero', 'macadeliccc', 'clem', 'samusenps'], 'count': 4}]",2024-02-13 14:26:30,2024-02-13 14:26:30.662,[],/posts/gsarti/602803860140202,9,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png,273.0,Ali El Filali,alielfilali01,984997046329385,"[{'type': 'text', 'value': 'I love the new ', 'raw': 'I love the new '}, {'type': 'inline_code', 'code': 'Viewer', 'raw': '`Viewer`'}, {'type': 'text', 'value': "" and I didn't know how much I needed it until now"", 'raw': "" and I didn't know how much I needed it until now""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'sylvain', 'raw': '@sylvain'}, {'type': 'text', 'value': ' , ', 'raw': ' , '}, {'type': 'mention', 'user': 'lhoestq', 'raw': '@lhoestq'}, {'type': 'text', 'value': ' and team, GREAT JOB 🔥 and THANK YOU 🤗', 'raw': ' and team, GREAT JOB 🔥 and THANK YOU 🤗'}]","I love the new `Viewer` and I didn't know how much I needed it until now +@sylvain , @lhoestq and team, GREAT JOB 🔥 and THANK YOU 🤗","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626237d9bbcbd1c34f1bb231/neWCKybxwscUHIn4bBP2k.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626237d9bbcbd1c34f1bb231/wxzej_yNMzhuqEd07ezKB.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/626237d9bbcbd1c34f1bb231/J0R0XTZiu9eVV_9iuzrG3.jpeg'}]","[{'_id': '5e9ecfc04957053f60648a3e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1594214747713-5e9ecfc04957053f60648a3e.png', 'fullname': 'Quentin Lhoest', 'name': 'lhoestq', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 331}, {'_id': '601d5e542a594bae588537b1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1612538294062-601d5e542a594bae588537b1.jpeg', 'fullname': 'Sylvain', 'name': 'sylvain', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '👍', 'users': ['davanstrien', 'osanseviero', 'julien-c', 'macadeliccc', 'clem', 'DamarJati', 'kramp'], 'count': 7}, {'reaction': '❤️', 'users': ['clem', 'kramp', 'lhoestq'], 'count': 3}, {'reaction': '🤗', 'users': ['lhoestq'], 'count': 1}]",2024-02-13 13:17:16,2024-02-15 13:59:05.898,"[{'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 
'isFollowing': False}, {'_id': '626237d9bbcbd1c34f1bb231', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png', 'fullname': 'Ali El Filali', 'name': 'alielfilali01', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 273, 'isFollowing': False}, {'_id': '62d6c40ba71903fb4c1e3261', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62d6c40ba71903fb4c1e3261/SQGmYShjtveXBf8HVpxPp.jpeg', 'fullname': 'Bertrand Chevrier', 'name': 'kramp', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 114, 'isFollowing': False}]",/posts/alielfilali01/984997046329385,14,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/nRCxbVng_PPBqKd-Z3KVc.jpeg,398.0,Dmitry Ryumin,DmitryRyumin,779555658291000,"[{'type': 'text', 'value': 'In Search of the Robust Facial Expressions Recognition Model: The Visual Cross-Corpus Study, ', 'raw': 'In Search of the Robust Facial Expressions Recognition Model: The Visual Cross-Corpus Study, '}, {'type': 'mention', 'user': 'ElenaRyumina', 'raw': '@ElenaRyumina'}, {'type': 'text', 'value': ' et al.', 'raw': ' et al.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model: ', 'raw': 'Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'ElenaRyumina/face_emotion_recognition'}, 'url': 'https://huggingface.co/ElenaRyumina/face_emotion_recognition', 'raw': 'https://huggingface.co/ElenaRyumina/face_emotion_recognition'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Demo: ', 'raw': 'Demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'ElenaRyumina/Facial_Expression_Recognition'}, 'url': 'https://huggingface.co/spaces/ElenaRyumina/Facial_Expression_Recognition', 'raw': 'https://huggingface.co/spaces/ElenaRyumina/Facial_Expression_Recognition'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Repo: ', 'raw': 'Repo: '}, {'type': 'link', 'href': 'https://github.com/ElenaRyumina/EMO-AffectNetModel', 'raw': 'https://github.com/ElenaRyumina/EMO-AffectNetModel'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Paper: ', 'raw': 'Paper: '}, {'type': 'link', 'href': 'https://www.sciencedirect.com/science/article/pii/S0925231222012656', 'raw': 'https://www.sciencedirect.com/science/article/pii/S0925231222012656'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'mention', 'user': 'ElenaRyumina', 'raw': '@ElenaRyumina'}]","In Search of the Robust Facial Expressions Recognition Model: The Visual Cross-Corpus Study, @ElenaRyumina et al. 
+ +Model: https://huggingface.co/ElenaRyumina/face_emotion_recognition +Demo: https://huggingface.co/spaces/ElenaRyumina/Facial_Expression_Recognition +Repo: https://github.com/ElenaRyumina/EMO-AffectNetModel + +Paper: https://www.sciencedirect.com/science/article/pii/S0925231222012656 + +@ElenaRyumina","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/aT8ZIJeiJn3EqnfnNuO9V.jpeg'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/s7fc5Eiows8Q6mYtY0wwT.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/Wq4-yNAeYTjAqFHSZYaDo.gif'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6493306970d925ae80523a53/-eTLuFVOVRH6lON3GC9yE.gif'}]","[{'_id': '65aabec7c8903e28aec0ce3e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/yQDxSx0Il0jwma_u2UyJe.jpeg', 'fullname': 'Elena Ryumina', 'name': 'ElenaRyumina', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 23}]","[{'reaction': '👍', 'users': ['osanseviero', 'clem', 'samusenps', 'victor', 'fffiloni', 'gryhkn', 'kramp'], 'count': 7}, {'reaction': '❤️', 'users': ['clem', 'samusenps', 'ArthurZ', 'AIIAR'], 'count': 4}]",2024-02-13 12:45:00,2024-03-04 23:31:19.343,[],/posts/DmitryRyumin/779555658291000,32,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg,415.0,Joseph [open/acc] Pollack,Tonic,531871206992279,"[{'type': 'text', 'value': '🙋🏻\u200d♂️hey there folks ,', 'raw': '🙋🏻\u200d♂️hey there folks ,'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🤗Aya has been released ! It's an absolutely massive undertaking to create a huge multilingual dataset and multilingual model of very high quality. "", 'raw': ""🤗Aya has been released ! It's an absolutely massive undertaking to create a huge multilingual dataset and multilingual model of very high quality. 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Papers : ', 'raw': 'Papers : '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://cohere.com/research/papers/aya-dataset-paper-2024-02-13', 'raw': 'https://cohere.com/research/papers/aya-dataset-paper-2024-02-13'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://cohere.com/research/papers/aya-model-paper-2024-02-13', 'raw': 'https://cohere.com/research/papers/aya-model-paper-2024-02-13'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Model : ', 'raw': 'Model : '}, {'type': 'link', 'href': 'https://huggingface.co/CohereForAI/aya-101', 'raw': 'https://huggingface.co/CohereForAI/aya-101'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dataset : ', 'raw': 'Dataset : '}, {'type': 'link', 'href': 'https://huggingface.co/datasets/CohereForAI/aya_dataset', 'raw': 'https://huggingface.co/datasets/CohereForAI/aya_dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I am proud to be one of 3,000 humans who built Aya - a new massively multilingual, generative LLM that outperforms existing open-source models and covers 101 different languages. Together, we are accelerating multilingual AI. 🤗', 'raw': 'I am proud to be one of 3,000 humans who built Aya - a new massively multilingual, generative LLM that outperforms existing open-source models and covers 101 different languages. Together, we are accelerating multilingual AI. 🤗'}]","🙋🏻‍♂️hey there folks , + +🤗Aya has been released ! It's an absolutely massive undertaking to create a huge multilingual dataset and multilingual model of very high quality. + +Papers : +https://cohere.com/research/papers/aya-dataset-paper-2024-02-13 +https://cohere.com/research/papers/aya-model-paper-2024-02-13 + +Model : https://huggingface.co/CohereForAI/aya-101 +Dataset : https://huggingface.co/datasets/CohereForAI/aya_dataset + + +I am proud to be one of 3,000 humans who built Aya - a new massively multilingual, generative LLM that outperforms existing open-source models and covers 101 different languages. Together, we are accelerating multilingual AI. 
🤗",[],[],"[{'reaction': '❤️', 'users': ['tellarin', 'mvonwyl', 'marcvw-sightengine', 'osanseviero', 'davanstrien', 'kramp', 'jnemecek', 'clem', 'samusenps', 'raghavprabhakar', 'MexIvanov', 'sarahooker', 'alielfilali01', 'yjernite'], 'count': 14}]",2024-02-13 11:31:22,2024-02-16 10:24:03.914,"[{'_id': '640f24edde9b00b4eb9bbf87', 'avatarUrl': '/avatars/147c412becf14094684852f3b9a306fc.svg', 'fullname': 'abdul basit', 'name': 'basit123796', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Tonic/531871206992279,27,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,712513300539997,"[{'type': 'text', 'value': 'Aya Dataset', 'raw': 'Aya Dataset'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'An Open-Access Collection for Multilingual Instruction Tuning', 'raw': 'An Open-Access Collection for Multilingual Instruction Tuning'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.06619'}, 'url': 'https://huggingface.co/papers/2402.06619', 'raw': 'https://huggingface.co/papers/2402.06619', 'label': 'Aya Dataset: An Open-Access Collection for Multilingual Instruction\n Tuning (2402.06619)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Datasets are foundational to many breakthroughs in modern artificial intelligence. Many recent achievements in the space of natural language processing (NLP) can be attributed to the finetuning of pre-trained models on a diverse set of tasks that enables a large language model (LLM) to respond to instructions. Instruction fine-tuning (IFT) requires specifically constructed and annotated datasets. However, existing datasets are almost all in the English language. In this work, our primary goal is to bridge the language gap by building a human-curated instruction-following dataset spanning 65 languages. We worked with fluent speakers of languages from around the world to collect natural instances of instructions and completions. Furthermore, we create the most extensive multilingual collection to date, comprising 513 million instances through templating and translating existing datasets across 114 languages. In total, we contribute four key resources: we develop and open-source the Aya Annotation Platform, the Aya Dataset, the Aya Collection, and the Aya Evaluation Suite. The Aya initiative also serves as a valuable case study in participatory research, involving collaborators from 119 countries. We see this as a valuable framework for future research collaborations that aim to bridge gaps in resources.', 'raw': 'Datasets are foundational to many breakthroughs in modern artificial intelligence. Many recent achievements in the space of natural language processing (NLP) can be attributed to the finetuning of pre-trained models on a diverse set of tasks that enables a large language model (LLM) to respond to instructions. Instruction fine-tuning (IFT) requires specifically constructed and annotated datasets. However, existing datasets are almost all in the English language. In this work, our primary goal is to bridge the language gap by building a human-curated instruction-following dataset spanning 65 languages. 
We worked with fluent speakers of languages from around the world to collect natural instances of instructions and completions. Furthermore, we create the most extensive multilingual collection to date, comprising 513 million instances through templating and translating existing datasets across 114 languages. In total, we contribute four key resources: we develop and open-source the Aya Annotation Platform, the Aya Dataset, the Aya Collection, and the Aya Evaluation Suite. The Aya initiative also serves as a valuable case study in participatory research, involving collaborators from 119 countries. We see this as a valuable framework for future research collaborations that aim to bridge gaps in resources.'}]","Aya Dataset + +An Open-Access Collection for Multilingual Instruction Tuning + +https://huggingface.co/papers/2402.06619 + +Datasets are foundational to many breakthroughs in modern artificial intelligence. Many recent achievements in the space of natural language processing (NLP) can be attributed to the finetuning of pre-trained models on a diverse set of tasks that enables a large language model (LLM) to respond to instructions. Instruction fine-tuning (IFT) requires specifically constructed and annotated datasets. However, existing datasets are almost all in the English language. In this work, our primary goal is to bridge the language gap by building a human-curated instruction-following dataset spanning 65 languages. We worked with fluent speakers of languages from around the world to collect natural instances of instructions and completions. Furthermore, we create the most extensive multilingual collection to date, comprising 513 million instances through templating and translating existing datasets across 114 languages. In total, we contribute four key resources: we develop and open-source the Aya Annotation Platform, the Aya Dataset, the Aya Collection, and the Aya Evaluation Suite. The Aya initiative also serves as a valuable case study in participatory research, involving collaborators from 119 countries. 
We see this as a valuable framework for future research collaborations that aim to bridge gaps in resources.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/vl5lXy3-pp6bh3sKHfpZX.png'}]",[],"[{'reaction': '❤️', 'users': ['NeuralNovel', 'alielfilali01', 'Pclanglais', 'taufiqdp', 'kargaranamir', 'samusenps', 'Dlbk', 'macadeliccc', 'osanseviero', 'vumichien', 'juancopi81', 'lhoestq', 'davanstrien', 'julien-c', 'mathiasn1', 'clem', 'raghavprabhakar', 'sugatoray', 'DataSoul', 'Ryukijano', 'grenwi'], 'count': 21}, {'reaction': '🤗', 'users': ['alielfilali01', 'osanseviero', 'vumichien', 'alvarobartt', 'davanstrien', 'julien-c', 'clem', 'Dlbk', 'Ryukijano'], 'count': 9}]",2024-02-12 23:07:55,2024-02-13 13:04:52.312,"[{'_id': '626237d9bbcbd1c34f1bb231', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png', 'fullname': 'Ali El Filali', 'name': 'alielfilali01', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 273, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '60f0608166e5701b80ed3f02', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60f0608166e5701b80ed3f02/BHso-wSWpR9b8b8CKvodC.jpeg', 'fullname': 'Alvaro Bartolome', 'name': 'alvarobartt', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1920, 'isFollowing': False}]",/posts/akhaliq/712513300539997,124,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/64ce091a9e9ca8123d7a42b0/OEPggp82RwigxNLL35LgT.jpeg,234.0,Pierre-Carl Langlais,Pclanglais,990629720161457,"[{'type': 'text', 'value': ""Today I'm releasing marginalia, a python library to perform corpus analysis and retrieve structured annotations with open LLMs like Mistral Open-Hermes-2.5: "", 'raw': ""Today I'm releasing marginalia, a python library to perform corpus analysis and retrieve structured annotations with open LLMs like Mistral Open-Hermes-2.5: ""}, {'type': 'link', 'href': 'https://github.com/Pleias/marginalia', 'raw': 'https://github.com/Pleias/marginalia'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""marginalia leverages vllm inference speed to re-generate until all the output matches an expected json structure and to send batches of several unstructured elements for enhanced pattern detection. It works especially well for bibliographies. 
The demo transforms a very old list (Benjamin Franklin's favorite books from 1744) into well-structured data: ""}, {'type': 'link', 'href': 'https://colab.research.google.com/drive/1xKjK2mDDpXMaKG5YLpFhOM7jehxt0kEt?usp=sharing', 'raw': 'https://colab.research.google.com/drive/1xKjK2mDDpXMaKG5YLpFhOM7jehxt0kEt?usp=sharing'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""While marginalia can be quite flexible, it definitely isn't a general purpose tool for json generation (like outlines). For now I don't intend to extend support for more complex json structures, but I'm really looking forward to potential feedback and suggestions."", 'raw': ""While marginalia can be quite flexible, it definitely isn't a general purpose tool for json generation (like outlines). For now I don't intend to extend support for more complex json structures, but I'm really looking forward to potential feedback and suggestions.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","Today I'm releasing marginalia, a python library to perform corpus analysis and retrieve structured annotations with open LLMs like Mistral Open-Hermes-2.5: https://github.com/Pleias/marginalia + +marginalia leverages vllm inference speed to re-generate until all the output matches an expected json structure and to send batches of several unstructured elements for enhanced pattern detection. It works especially well for bibliographies. The demo transforms a very old list (Benjamin Franklin's favorite books from 1744) into well-structured data: https://colab.research.google.com/drive/1xKjK2mDDpXMaKG5YLpFhOM7jehxt0kEt?usp=sharing + +While marginalia can be quite flexible, it definitely isn't a general purpose tool for json generation (like outlines). For now I don't intend to extend support for more complex json structures, but I'm really looking forward to potential feedback and suggestions. + +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64ce091a9e9ca8123d7a42b0/EITi6y2L6q5n0Wvqg1Gz6.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/64ce091a9e9ca8123d7a42b0/29eg3UzF1tMDa-D0KAInk.png'}]",[],"[{'reaction': '❤️', 'users': ['davanstrien', 'osanseviero', 'merve', 'AIIAR', 'samusenps', 'gate369', 'macadeliccc', 'clem', 'dillfrescott'], 'count': 9}, {'reaction': '👍', 'users': ['esilabet'], 'count': 1}]",2024-02-12 19:24:45,2024-02-12 19:24:45.433,[],/posts/Pclanglais/990629720161457,449,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,884256831552573,"[{'type': 'text', 'value': ""Google released a paper on Chess that doesn't rely on MCTS (aka AlphaZero) ♟️ "", 'raw': ""Google released a paper on Chess that doesn't rely on MCTS (aka AlphaZero) ♟️ ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'their secret sauce is.. 
synthetic data pseudolabeled by Stockfish engine 😀 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2024 really is the year of synthetic data across all domains!', 'raw': '2024 really is the year of synthetic data across all domains!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""There's a nice discussion here, join us "", 'raw': ""There's a nice discussion here, join us ""}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.04494'}, 'url': 'https://huggingface.co/papers/2402.04494', 'raw': 'https://huggingface.co/papers/2402.04494', 'label': 'Grandmaster-Level Chess Without Search (2402.04494)'}]","Google released a paper on Chess that doesn't rely on MCTS (aka AlphaZero) ♟️ +their secret sauce is.. synthetic data pseudolabeled by Stockfish engine 😀 +2024 really is the year of synthetic data across all domains! +There's a nice discussion here, join us https://huggingface.co/papers/2402.04494",[],[],"[{'reaction': '❤️', 'users': ['alielfilali01', 'osanseviero', 'AIIAR', 'samusenps', 'nickandbro', 'Citaman', 'clem', 'cosmojg'], 'count': 8}]",2024-02-12 18:35:07,2024-02-12 22:43:54.161,"[{'_id': '626237d9bbcbd1c34f1bb231', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/626237d9bbcbd1c34f1bb231/EJrOjvAL-68qMCYdnvOrq.png', 'fullname': 'Ali El Filali', 'name': 'alielfilali01', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 273, 'isFollowing': False}, {'_id': '64df20dc22d604b137270864', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64df20dc22d604b137270864/C-1_EzY0tnrb-Cyn6lh93.jpeg', 'fullname': 'TA', 'name': 'AIIAR', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3, 'isFollowing': False}]",/posts/merve/884256831552573,71,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg,334.0,Daniel Vila,dvilasuero,680660181190026,"[{'type': 'text', 'value': '🤗 Data is better together!', 'raw': '🤗 Data is better together!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Data is essential for training good AI systems. We believe that the amazing community built around open machine learning can also work on developing amazing datasets together. ', 'raw': 'Data is essential for training good AI systems. We believe that the amazing community built around open machine learning can also work on developing amazing datasets together. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'To explore how this can be done, Argilla and Hugging Face are thrilled to announce a collaborative project where we’re asking Hugging Face community members to build a dataset consisting of LLM prompts collectively. ', 'raw': 'To explore how this can be done, Argilla and Hugging Face are thrilled to announce a collaborative project where we’re asking Hugging Face community members to build a dataset consisting of LLM prompts collectively. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What are we doing? ', 'raw': 'What are we doing? '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Using an instance of Argilla — a powerful open-source data collaboration tool — hosted on the Hugging Face Hub, we are collecting ratings of prompts based on their quality. 
', 'raw': 'Using an instance of Argilla — a powerful open-source data collaboration tool — hosted on the Hugging Face Hub, we are collecting ratings of prompts based on their quality. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How Can You Contribute?', 'raw': 'How Can You Contribute?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'It’s super simple to start contributing:', 'raw': 'It’s super simple to start contributing:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Sign up if you don’t have a Hugging Face account', 'raw': '1. Sign up if you don’t have a Hugging Face account'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Go to this Argilla Space and sign in: ', 'raw': '2. Go to this Argilla Space and sign in: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/DIBT/prompt-collective', 'raw': 'https://huggingface.co/spaces/DIBT/prompt-collective'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Read the guidelines and start rating prompts! ', 'raw': '3. Read the guidelines and start rating prompts! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can also join the #data-is-better-together channel in the Hugging Face Discord. ', 'raw': 'You can also join the #data-is-better-together channel in the Hugging Face Discord. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Finally, to track the community progress we'll be updating this Gradio dashboard:"", 'raw': ""Finally, to track the community progress we'll be updating this Gradio dashboard:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/spaces/DIBT/prompt-collective-dashboard', 'raw': 'https://huggingface.co/spaces/DIBT/prompt-collective-dashboard'}, {'type': 'new_line', 'raw': '\n'}]","🤗 Data is better together! + +Data is essential for training good AI systems. We believe that the amazing community built around open machine learning can also work on developing amazing datasets together. + +To explore how this can be done, Argilla and Hugging Face are thrilled to announce a collaborative project where we’re asking Hugging Face community members to build a dataset consisting of LLM prompts collectively. + +What are we doing? +Using an instance of Argilla — a powerful open-source data collaboration tool — hosted on the Hugging Face Hub, we are collecting ratings of prompts based on their quality. + +How Can You Contribute? +It’s super simple to start contributing: + +1. Sign up if you don’t have a Hugging Face account + +2. Go to this Argilla Space and sign in: https://huggingface.co/spaces/DIBT/prompt-collective + +3. Read the guidelines and start rating prompts! + +You can also join the #data-is-better-together channel in the Hugging Face Discord. 
+ +Finally, to track the community progress we'll be updating this Gradio dashboard: + +https://huggingface.co/spaces/DIBT/prompt-collective-dashboard +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60420dccc15e823a685f2b03/AjB8zoTKujO-RZ1nQaVBz.png'}]",[],"[{'reaction': '❤️', 'users': ['davanstrien', 'gabrielmbmb', 'osanseviero', 'ignacioct', 'sdiazlor', 'plaguss', 'julien-c', 'tomaarsen', 'nickandbro', 'victor', 'lunarflu', 'clem', 'frascuchon', 'abbanbhan', 'lewtun', 'merve', 'samusenps', 'MoritzLaurer', 'johko', 'lhoestq', 'smangrul', 'sayhan', 'medmac01', 'maghwa'], 'count': 24}, {'reaction': '🤗', 'users': ['davanstrien', 'gabrielmbmb', 'osanseviero', 'kramp', 'ignacioct', 'sdiazlor', 'julien-c', 'victor', 'severo', 'clem', 'mvaloatto', 'lewtun', 'samusenps', 'chkla', 'smangrul', 'Muttermal'], 'count': 16}, {'reaction': '🤯', 'users': ['davanstrien', 'osanseviero', 'sdiazlor', 'julien-c', 'victor', 'clem', 'smangrul'], 'count': 7}]",2024-02-12 16:04:51,2024-03-07 03:08:28.596,"[{'_id': '60f2fc91b92afccb7c34b8ed', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60f2fc91b92afccb7c34b8ed/W2-Nay12Ef4Ltyaf8EKE9.jpeg', 'fullname': 'Gabriel Martín Blázquez', 'name': 'gabrielmbmb', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 118, 'isFollowing': False}, {'_id': '60107b385ac3e86b3ea4fc34', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1627505688463-60107b385ac3e86b3ea4fc34.jpeg', 'fullname': 'Daniel van Strien', 'name': 'davanstrien', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 638, 'isFollowing': False}, {'_id': '5e1e17b6fcf41d740b6996a8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg', 'fullname': 'Bram Vanroy', 'name': 'BramVanroy', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 211, 'isFollowing': False}, {'_id': '63be122f3b0665ad51d20535', 'avatarUrl': '/avatars/83fa7f317d72ed33dd23b4ec6e655fab.svg', 'fullname': 'mariana', 'name': 'Msignal', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/dvilasuero/680660181190026,126,,5 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,809039256258115,"[{'type': 'text', 'value': 'Understanding BARTScore 🛹', 'raw': 'Understanding BARTScore 🛹'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'BARTScore is a text-generation evaluation metric that treats model evaluation as a text-generation task 🔄', 'raw': 'BARTScore is a text-generation evaluation metric that treats model evaluation as a text-generation task 🔄'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Other metrics approach the evaluation problem from different ML task perspectives; for instance, ROUGE and BLEU formulate it as an unsupervised matching task, BLEURT and COMET as a supervised regression, and BEER as a supervised ranking task.', 'raw': 'Other metrics approach the evaluation problem from different ML task perspectives; for instance, ROUGE and BLEU formulate it as an unsupervised matching task, BLEURT and COMET as a supervised regression, and BEER as a supervised ranking task.'}, {'type': 'new_line', 'raw': '\n'}, 
{'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Meanwhile, BARTScore formulates it as a text-generation task. Its idea is to leverage BART's pre-trained contextual embeddings to return a score that measures either the faithfulness, precision, recall, or F-score response of the main text-generation model."", 'raw': ""Meanwhile, BARTScore formulates it as a text-generation task. Its idea is to leverage BART's pre-trained contextual embeddings to return a score that measures either the faithfulness, precision, recall, or F-score response of the main text-generation model.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For example, if we want to measure faithfulness, the way it works is that we would take the source and the generated text from our model and use BART to calculate the log token probability of the generated text given the source; we can then weight those results and return the sum.', 'raw': 'For example, if we want to measure faithfulness, the way it works is that we would take the source and the generated text from our model and use BART to calculate the log token probability of the generated text given the source; we can then weight those results and return the sum.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'BARTScore correlates nicely with human scores, and it is relatively simple to implement.', 'raw': 'BARTScore correlates nicely with human scores, and it is relatively simple to implement.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📑 Here is the original BARTScore paper: ', 'raw': '📑 Here is the original BARTScore paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2106.11520'}, 'url': 'https://huggingface.co/papers/2106.11520', 'raw': 'https://huggingface.co/papers/2106.11520', 'label': 'BARTScore: Evaluating Generated Text as Text Generation (2106.11520)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧑\u200d💻 And the GitHub repo to use this metric: ', 'raw': '🧑\u200d💻 And the GitHub repo to use this metric: '}, {'type': 'link', 'href': 'https://github.com/neulab/BARTScore', 'raw': 'https://github.com/neulab/BARTScore'}]","Understanding BARTScore 🛹 + +BARTScore is a text-generation evaluation metric that treats model evaluation as a text-generation task 🔄 + +Other metrics approach the evaluation problem from different ML task perspectives; for instance, ROUGE and BLEU formulate it as an unsupervised matching task, BLEURT and COMET as a supervised regression, and BEER as a supervised ranking task. + +Meanwhile, BARTScore formulates it as a text-generation task. Its idea is to leverage BART's pre-trained contextual embeddings to return a score that measures either the faithfulness, precision, recall, or F-score response of the main text-generation model. + +For example, if we want to measure faithfulness, the way it works is that we would take the source and the generated text from our model and use BART to calculate the log token probability of the generated text given the source; we can then weight those results and return the sum. + +BARTScore correlates nicely with human scores, and it is relatively simple to implement. 
+ +📑 Here is the original BARTScore paper: https://huggingface.co/papers/2106.11520 +🧑‍💻 And the GitHub repo to use this metric: https://github.com/neulab/BARTScore","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/kXlMoN0Is9snPYQIQRBEW.jpeg'}]",[],"[{'reaction': '👍', 'users': ['victor', 'osanseviero', 'clem', 'dimpu01', 'ivanfioravanti', 'firqaaa', 'i0s', 'YuvrajSingh9886', 'iky1e'], 'count': 9}]",2024-02-06 10:45:16,2024-02-06 10:45:16.854,[],/posts/santiviquez/809039256258115,1522,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,500939719344175,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Rethinking Interpretability in the Era of Large Language Models"", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Rethinking Interpretability in the Era of Large Language Models""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'by C. Singh, J. P. Inala, ', 'raw': 'by C. Singh, J. P. Inala, '}, {'type': 'mention', 'user': 'mgalley', 'raw': '@mgalley'}, {'type': 'text', 'value': ', R. Caruana, ', 'raw': ', R. Caruana, '}, {'type': 'mention', 'user': 'wyngjf', 'raw': '@wyngjf'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In this opinion piece, authors contend that the new capabilities of LLMs can deeply transform the scope of interpretability, moving from low-level explanations such as saliency maps to natural language explanations that would allow for natural interaction with users.', 'raw': 'In this opinion piece, authors contend that the new capabilities of LLMs can deeply transform the scope of interpretability, moving from low-level explanations such as saliency maps to natural language explanations that would allow for natural interaction with users.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This ambitious goal is however hindered by LM’s natural tendency to hallucinate, their large size and their inherent opaqueness. Authors highlight in particular dataset explanations for knowledge discovery, explanations’ reliability and interactive explanations as important priorities for the future of interpretability research.', 'raw': 'This ambitious goal is however hindered by LM’s natural tendency to hallucinate, their large size and their inherent opaqueness. Authors highlight in particular dataset explanations for knowledge discovery, explanations’ reliability and interactive explanations as important priorities for the future of interpretability research.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.01761'}, 'url': 'https://huggingface.co/papers/2402.01761', 'raw': 'https://huggingface.co/papers/2402.01761', 'label': 'Rethinking Interpretability in the Era of Large Language Models (2402.01761)'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Rethinking Interpretability in the Era of Large Language Models +by C. Singh, J. P. Inala, @mgalley, R. 
Caruana, @wyngjf + +In this opinion piece, authors contend that the new capabilities of LLMs can deeply transform the scope of interpretability, moving from low-level explanations such as saliency maps to natural language explanations that would allow for natural interaction with users. + +This ambitious goal is however hindered by LM’s natural tendency to hallucinate, their large size and their inherent opaqueness. Authors highlight in particular dataset explanations for knowledge discovery, explanations’ reliability and interactive explanations as important priorities for the future of interpretability research. + +📄 Paper: https://huggingface.co/papers/2402.01761","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/WCE27GotE9cWxGyKwBwPx.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/a6nq47yOEWGjRvjdTLSgu.png'}]","[{'_id': '5f6bed855e78cc6b0ed31dee', 'avatarUrl': '/avatars/d67edb079fa3fe7ea6f2081f7a3afe9e.svg', 'fullname': 'Michel Galley', 'name': 'mgalley', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}, {'_id': '641904caf9d6f1d772ec7af7', 'avatarUrl': '/avatars/4a63eac71eb30f70b1a0e9d4708f26c1.svg', 'fullname': 'Jianfeng Gao', 'name': 'wyngjf', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}]","[{'reaction': '❤️', 'users': ['osanseviero', 'JairoDanielMT'], 'count': 2}]",2024-02-06 08:50:31,2024-02-06 08:50:31.137,[],/posts/gsarti/500939719344175,13,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6345bd89fe134dfd7a0dba40/72EZjBiatKqqsu96txd_v.jpeg,646.0,Furkan Gözükara,MonsterMMORPG,982415168433293,"[{'type': 'text', 'value': ""Today my RunPod pod was broken and I didn't notice until I fully did setup it. So I have written the following tutorial for how to deploy a Pod and also verify it is not broken."", 'raw': ""Today my RunPod pod was broken and I didn't notice until I fully did setup it. 
So I have written the following tutorial for how to deploy a Pod and also verify it is not broken.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can read on:', 'raw': 'You can read on:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Patreon (public) : ', 'raw': 'Patreon (public) : '}, {'type': 'link', 'href': 'https://www.patreon.com/posts/how-to-deploy-on-97919576', 'raw': 'https://www.patreon.com/posts/how-to-deploy-on-97919576'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Medium (public) : ', 'raw': 'Medium (public) : '}, {'type': 'link', 'href': 'https://medium.com/@furkangozukara/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working-20e47031c0b5', 'raw': 'https://medium.com/@furkangozukara/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working-20e47031c0b5'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'CivitAI (public) : ', 'raw': 'CivitAI (public) : '}, {'type': 'link', 'href': 'https://civitai.com/articles/3994/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working', 'raw': 'https://civitai.com/articles/3994/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'LinkedIn (public) : ', 'raw': 'LinkedIn (public) : '}, {'type': 'link', 'href': 'https://www.linkedin.com/pulse/how-deploy-pod-runpod-verify-working-furkan-g%2525C3%2525B6z%2525C3%2525BCkara-lgplf%3FtrackingId=EuNOjpKCSQ%252BVfpiQV3D6KQ%253D%253D/?trackingId=EuNOjpKCSQ%2BVfpiQV3D6KQ%3D%3D', 'raw': 'https://www.linkedin.com/pulse/how-deploy-pod-runpod-verify-working-furkan-g%2525C3%2525B6z%2525C3%2525BCkara-lgplf%3FtrackingId=EuNOjpKCSQ%252BVfpiQV3D6KQ%253D%253D/?trackingId=EuNOjpKCSQ%2BVfpiQV3D6KQ%3D%3D'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Dev . to (public) : ', 'raw': 'Dev . to (public) : '}, {'type': 'link', 'href': 'https://dev.to/furkangozukara/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working-3pop', 'raw': 'https://dev.to/furkangozukara/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working-3pop'}, {'type': 'new_line', 'raw': '\n'}]","Today my RunPod pod was broken and I didn't notice until I fully did setup it. So I have written the following tutorial for how to deploy a Pod and also verify it is not broken. + +You can read on: + +Patreon (public) : https://www.patreon.com/posts/how-to-deploy-on-97919576 + +Medium (public) : https://medium.com/@furkangozukara/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working-20e47031c0b5 + +CivitAI (public) : https://civitai.com/articles/3994/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working + +LinkedIn (public) : https://www.linkedin.com/pulse/how-deploy-pod-runpod-verify-working-furkan-g%2525C3%2525B6z%2525C3%2525BCkara-lgplf%3FtrackingId=EuNOjpKCSQ%252BVfpiQV3D6KQ%253D%253D/?trackingId=EuNOjpKCSQ%2BVfpiQV3D6KQ%3D%3D + +Dev . 
to (public) : https://dev.to/furkangozukara/how-to-deploy-a-pod-on-runpod-and-verify-it-is-working-3pop +",[],[],[],2024-02-05 22:31:01,2024-02-05 22:31:01.648,[],/posts/MonsterMMORPG/982415168433293,34,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,777035688509520,"[{'type': 'text', 'value': 'StepCoder', 'raw': 'StepCoder'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Improve Code Generation with Reinforcement Learning from Compiler Feedback', 'raw': 'Improve Code Generation with Reinforcement Learning from Compiler Feedback'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'paper page: ', 'raw': 'paper page: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.01391'}, 'url': 'https://huggingface.co/papers/2402.01391', 'raw': 'https://huggingface.co/papers/2402.01391', 'label': 'StepCoder: Improve Code Generation with Reinforcement Learning from\n Compiler Feedback (2402.01391)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The advancement of large language models (LLMs) has significantly propelled the field of code generation. Previous work integrated reinforcement learning (RL) with compiler feedback for exploring the output space of LLMs to enhance code generation quality. However, the lengthy code generated by LLMs in response to complex human requirements makes RL exploration a challenge. Also, since the unit tests may not cover the complicated code, optimizing LLMs by using these unexecuted code snippets is ineffective. To tackle these challenges, we introduce StepCoder, a novel RL framework for code generation, consisting of two main components: CCCS addresses the exploration challenge by breaking the long sequences code generation task into a Curriculum of Code Completion Subtasks, while FGO only optimizes the model by masking the unexecuted code segments to provide Fine-Grained Optimization. In addition, we furthermore construct the APPS+ dataset for RL training, which is manually verified to ensure the correctness of unit tests. Experimental results show that our method improves the ability to explore the output space and outperforms state-of-the-art approaches in corresponding benchmarks.', 'raw': 'The advancement of large language models (LLMs) has significantly propelled the field of code generation. Previous work integrated reinforcement learning (RL) with compiler feedback for exploring the output space of LLMs to enhance code generation quality. However, the lengthy code generated by LLMs in response to complex human requirements makes RL exploration a challenge. Also, since the unit tests may not cover the complicated code, optimizing LLMs by using these unexecuted code snippets is ineffective. To tackle these challenges, we introduce StepCoder, a novel RL framework for code generation, consisting of two main components: CCCS addresses the exploration challenge by breaking the long sequences code generation task into a Curriculum of Code Completion Subtasks, while FGO only optimizes the model by masking the unexecuted code segments to provide Fine-Grained Optimization. In addition, we furthermore construct the APPS+ dataset for RL training, which is manually verified to ensure the correctness of unit tests. 
Experimental results show that our method improves the ability to explore the output space and outperforms state-of-the-art approaches in corresponding benchmarks.'}]","StepCoder + +Improve Code Generation with Reinforcement Learning from Compiler Feedback + +paper page: https://huggingface.co/papers/2402.01391 + +The advancement of large language models (LLMs) has significantly propelled the field of code generation. Previous work integrated reinforcement learning (RL) with compiler feedback for exploring the output space of LLMs to enhance code generation quality. However, the lengthy code generated by LLMs in response to complex human requirements makes RL exploration a challenge. Also, since the unit tests may not cover the complicated code, optimizing LLMs by using these unexecuted code snippets is ineffective. To tackle these challenges, we introduce StepCoder, a novel RL framework for code generation, consisting of two main components: CCCS addresses the exploration challenge by breaking the long sequences code generation task into a Curriculum of Code Completion Subtasks, while FGO only optimizes the model by masking the unexecuted code segments to provide Fine-Grained Optimization. In addition, we furthermore construct the APPS+ dataset for RL training, which is manually verified to ensure the correctness of unit tests. Experimental results show that our method improves the ability to explore the output space and outperforms state-of-the-art approaches in corresponding benchmarks.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/QO4L9sxPAxeS77wmxAEEo.png'}]",[],"[{'reaction': '👍', 'users': ['osanseviero', 'clem', 'kramp', 'Dlbk', 'sepal'], 'count': 5}]",2024-02-05 21:37:39,2024-02-05 21:37:39.858,[],/posts/akhaliq/777035688509520,41,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png,883.0,Abubakar Abid,abidlabs,965503230990660,"[{'type': 'text', 'value': 'Necessity is the mother of invention, and of Gradio components.', 'raw': 'Necessity is the mother of invention, and of Gradio components.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Sometimes we realize that we need a Gradio component to build a cool application and demo, so we just build it. For example, we just added a new gr.ParamViewer component because we needed it to display information about Python & JavaScript functions in our documentation. ', 'raw': 'Sometimes we realize that we need a Gradio component to build a cool application and demo, so we just build it. For example, we just added a new gr.ParamViewer component because we needed it to display information about Python & JavaScript functions in our documentation. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Of course, our users should be able able to do the same thing for their machine learning applications, so that's why Gradio lets you build custom components, and publish them to the world 🔥"", 'raw': ""Of course, our users should be able able to do the same thing for their machine learning applications, so that's why Gradio lets you build custom components, and publish them to the world 🔥""}]","Necessity is the mother of invention, and of Gradio components. + +Sometimes we realize that we need a Gradio component to build a cool application and demo, so we just build it. 
For example, we just added a new gr.ParamViewer component because we needed it to display information about Python & JavaScript functions in our documentation. + +Of course, our users should be able to do the same thing for their machine learning applications, so that's why Gradio lets you build custom components, and publish them to the world 🔥","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/608b8bb39d7c9519b4adae19/L2n43bWPHUz8hX-Qv8VPX.gif'}]",[],"[{'reaction': '👍', 'users': ['sbarman25', 'victor', 'samusenps', 'ysharma', 'ajibawa-2023', 'gsarti', 'osanseviero', 'dvilasuero', 'hmb', 'notsahil'], 'count': 10}, {'reaction': '🤯', 'users': ['ysharma', 'dvilasuero', 'notsahil'], 'count': 3}]",2024-02-05 18:43:21,2024-02-05 18:47:13.645,[],/posts/abidlabs/965503230990660,504,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/1654090481550-60f2e021adf471cbdf8bb660.jpeg,151.0,Manuel Faysse,manu,470307658539102,"[{'type': 'text', 'value': ""These past months, I've been busy baking a special sort of Croissant 🥐 with an awesome team ! "", 'raw': ""These past months, I've been busy baking a special sort of Croissant 🥐 with an awesome team ! ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🥐 CroissantLLM is a truly bilingual language model trained on 3 trillion tokens of French and English data. In its size category (<2B), it is the best model in French, but it also rivals the best monolingual English models ! ', 'raw': '🥐 CroissantLLM is a truly bilingual language model trained on 3 trillion tokens of French and English data. In its size category (<2B), it is the best model in French, but it also rivals the best monolingual English models ! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💾 To train it, we collected, filtered and cleaned huge quantities of permissively licensed French data, across various domains (legal, administrative, cultural, scientific), and different text modalities (speech transcriptions, movie subtitles, encyclopedias, forums, webpages)... ', 'raw': '💾 To train it, we collected, filtered and cleaned huge quantities of permissively licensed French data, across various domains (legal, administrative, cultural, scientific), and different text modalities (speech transcriptions, movie subtitles, encyclopedias, forums, webpages)... '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚖️ Assessing LLM performance is not easy, especially outside of English, and to this end we crafted a novel evaluation benchmark, FrenchBench, aiming to assess reasoning, factual knowledge, and linguistic capabilities of models in French !', 'raw': '⚖️ Assessing LLM performance is not easy, especially outside of English, and to this end we crafted a novel evaluation benchmark, FrenchBench, aiming to assess reasoning, factual knowledge, and linguistic capabilities of models in French !'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""🔎 The best current LLMs are hidden behind a shroud of mystery, trained with undisclosed training data mixes or strategies. We go the opposite way, releasing all of the project's artefacts (model checkpoints, data, training details, evaluation benchmarks...)
We obtain 81 % of the Stanford FMTI transparency criteria, far ahead of even most open initiatives !"", 'raw': ""🔎 The best current LLMs are hidden behind a shroud of mystery, trained with undisclosed training data mixes or strategies. We go the opposite way, releasing all of the project's artefacts (model checkpoints, data, training details, evaluation benchmarks...) We obtain 81 % of the Stanford FMTI transparency criteria, far ahead of even most open initiatives !""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🧪Beyond a powerful industrial resource, our transparent initiative is a stepping stone for many scientific questions ! How does teaching a model two languages instead of one split its monolingual ability ? Does training on so much French help the model integrate French-centric knowledge and cultural biases ? How does the model memorize the training data ?', 'raw': '🧪Beyond a powerful industrial resource, our transparent initiative is a stepping stone for many scientific questions ! How does teaching a model two languages instead of one split its monolingual ability ? Does training on so much French help the model integrate French-centric knowledge and cultural biases ? How does the model memorize the training data ?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Many more things to say, for those interested, I recommend checking out:', 'raw': 'Many more things to say, for those interested, I recommend checking out:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🗞️ The blogpost: ', 'raw': '🗞️ The blogpost: '}, {'type': 'link', 'href': 'https://huggingface.co/blog/manu/croissant-llm-blog', 'raw': 'https://huggingface.co/blog/manu/croissant-llm-blog'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📖 The 45 page report with lots of gems: ', 'raw': '📖 The 45 page report with lots of gems: '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2402.00786', 'raw': 'https://arxiv.org/abs/2402.00786'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🤖 Models, Data, Demo: ', 'raw': '🤖 Models, Data, Demo: '}, {'type': 'resource', 'resource': {'type': 'org', 'id': 'croissantllm'}, 'url': 'https://huggingface.co/croissantllm', 'raw': 'https://huggingface.co/croissantllm', 'image': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60f2e021adf471cbdf8bb660/PuLzi_Evn-UPfTcZP0DSU.png'}, {'type': 'new_line', 'raw': '\n'}]","These past months, I've been busy baking a special sort of Croissant 🥐 with an awesome team ! + +🥐 CroissantLLM is a truly bilingual language model trained on 3 trillion tokens of French and English data. In its size category (<2B), it is the best model in French, but it also rivals the best monolingual English models ! + +💾 To train it, we collected, filtered and cleaned huge quantities of permissively licensed French data, across various domains (legal, administrative, cultural, scientific), and different text modalities (speech transcriptions, movie subtitles, encyclopedias, forums, webpages)... + +⚖️ Assessing LLM performance is not easy, especially outside of English, and to this end we crafted a novel evaluation benchmark, FrenchBench, aiming to assess reasoning, factual knowledge, and linguistic capabilities of models in French !
+ +🔎 The best current LLMs are hidden behind a shroud of mystery, trained with undisclosed training data mixes or strategies. We go the opposite way, releasing all of the project's artefacts (model checkpoints, data, training details, evaluation benchmarks...) We obtain 81 % of the Stanford FMTI transparency criteria, far ahead of even most open initiatives ! + +🧪Beyond a powerful industrial resource, our transparent initiative is a stepping stone for many scientific questions ! How does teaching a model two languages instead of one split its monolingual ability ? Does training on so much French help the model integrate French-centric knowledge and cultural biases ? How does the model memorize the training data ? + +Many more things to say, for those interested, I recommend checking out: + +🗞️ The blogpost: https://huggingface.co/blog/manu/croissant-llm-blog +📖 The 45 page report with lots of gems: https://arxiv.org/abs/2402.00786 +🤖 Models, Data, Demo: https://huggingface.co/croissantllm +",[],[],"[{'reaction': '❤️', 'users': ['eliolio', 'gsarti', 'euclaise', 'eliebak', 'kramp', 'yozozaya', 'clem', 'blanchon', 'victor', 'mvaloatto', 'osanseviero', 'samusenps', 'santiviquez', 'ksiabani', 'batmac', 'nouamanetazi', 'davanstrien', 'fffiloni', 'alielfilali01', 'sbrandeis', 'Soubz', 'velaia'], 'count': 22}, {'reaction': '🤯', 'users': ['fffiloni', 'Soubz'], 'count': 2}]",2024-02-05 15:12:40,2024-02-06 07:27:15.701,"[{'_id': '5e7749883d77a72421292d07', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg', 'fullname': 'Gabriele Sarti', 'name': 'gsarti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 226, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}, {'_id': '64955109ac70da05b7aacb9a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/64955109ac70da05b7aacb9a/bZKEz24ZfaWDSI33yHUmR.png', 'fullname': 'Kostas Siabanis', 'name': 'ksiabani', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9, 'isFollowing': False}]",/posts/manu/470307658539102,2873,,3
https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,229518612093214,"[{'type': 'text', 'value': 'Some of my results from experimenting with hallucination detection techniques for LLMs \U0001fae8🔍', 'raw': 'Some of my results from experimenting with hallucination detection techniques for LLMs \U0001fae8🔍'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'First, the two main ideas used in the experiments—using token probabilities and LLM-Eval scores—are taken from these three papers:', 'raw': 'First, the two main ideas used in the experiments—using token probabilities and LLM-Eval scores—are taken from these three papers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. ', 'raw': '1. 
'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2208.05309'}, 'url': 'https://huggingface.co/papers/2208.05309', 'raw': 'https://huggingface.co/papers/2208.05309', 'label': 'Looking for a Needle in a Haystack: A Comprehensive Study of\n Hallucinations in Neural Machine Translation (2208.05309)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. ', 'raw': '2. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2303.08896'}, 'url': 'https://huggingface.co/papers/2303.08896', 'raw': 'https://huggingface.co/papers/2303.08896', 'label': 'SelfCheckGPT: Zero-Resource Black-Box Hallucination Detection for\n Generative Large Language Models (2303.08896)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. ', 'raw': '3. '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2305.13711'}, 'url': 'https://huggingface.co/papers/2305.13711', 'raw': 'https://huggingface.co/papers/2305.13711', 'label': 'LLM-Eval: Unified Multi-Dimensional Automatic Evaluation for Open-Domain\n Conversations with Large Language Models (2305.13711)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In the first two, the authors claim that computing the average of the sentence-level token probabilities is the best heuristic for detecting hallucinations. And from my results, we do see a weak positive correlation between average token probabilities and ground truth. 🤔', 'raw': 'In the first two, the authors claim that computing the average of the sentence-level token probabilities is the best heuristic for detecting hallucinations. And from my results, we do see a weak positive correlation between average token probabilities and ground truth. 🤔'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The nice thing about this method is that it comes with almost no implementation cost since we only need the output token probabilities from the generated text, so it is straightforward to implement.', 'raw': 'The nice thing about this method is that it comes with almost no implementation cost since we only need the output token probabilities from the generated text, so it is straightforward to implement.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The third paper proposes an evaluation schema where we do an extra call to an LLM and kindly ask it to rate on a scale from 0 to 5 how good the generated text is on a set of different criteria. 📝🤖', 'raw': 'The third paper proposes an evaluation schema where we do an extra call to an LLM and kindly ask it to rate on a scale from 0 to 5 how good the generated text is on a set of different criteria. 📝🤖'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I was able to reproduce similar results to those in the paper. 
There is a moderate positive correlation between the ground truth scores and the ones produced by the LLM.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Of course, this method is much more expensive since we would need one extra call to the LLM for every prediction that we would like to evaluate, and it is also very sensitive to prompt engineering. 🤷'}]","Some of my results from experimenting with hallucination detection techniques for LLMs 🫨🔍 + +First, the two main ideas used in the experiments—using token probabilities and LLM-Eval scores—are taken from these three papers: + +1. https://huggingface.co/papers/2208.05309 +2. https://huggingface.co/papers/2303.08896 +3. https://huggingface.co/papers/2305.13711 + +In the first two, the authors claim that computing the average of the sentence-level token probabilities is the best heuristic for detecting hallucinations. And from my results, we do see a weak positive correlation between average token probabilities and ground truth. 🤔 + +The nice thing about this method is that it comes with almost no implementation cost since we only need the output token probabilities from the generated text, so it is straightforward to implement. + +The third paper proposes an evaluation schema where we do an extra call to an LLM and kindly ask it to rate on a scale from 0 to 5 how good the generated text is on a set of different criteria. 📝🤖 + +I was able to reproduce similar results to those in the paper. There is a moderate positive correlation between the ground truth scores and the ones produced by the LLM. + +Of course, this method is much more expensive since we would need one extra call to the LLM for every prediction that we would like to evaluate, and it is also very sensitive to prompt engineering. 
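As a rough sketch of the first heuristic, the sentence-level average of token log-probabilities can be read straight off the generation scores of a Hugging Face causal LM; the model id and prompt below are placeholders, not the setup used in the experiments above:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "gpt2"  # placeholder; any causal LM on the Hub works the same way
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer("The capital of Costa Rica is", return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_new_tokens=20,
    do_sample=False,
    output_scores=True,
    return_dict_in_generate=True,
)

# Log-probability of each generated token under the model.
transition_scores = model.compute_transition_scores(
    outputs.sequences, outputs.scores, normalize_logits=True
)
avg_logprob = transition_scores[0].mean().item()
print(f"average token log-probability: {avg_logprob:.3f}")
# The heuristic: the lower this average, the less confident the model,
# and the more suspect the generation.
```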
🤷","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/gbbnaj8ipntSy7YvjjMic.jpeg'}]",[],"[{'reaction': '❤️', 'users': ['osanseviero', 'victor', 'clem', 'samusenps', 'gsarti', 'KasperNomm', 'JasperV13', 'sbrandeis'], 'count': 8}]",2024-02-05 10:16:27,2024-02-06 13:42:47.946,"[{'_id': '5e7749883d77a72421292d07', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg', 'fullname': 'Gabriele Sarti', 'name': 'gsarti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 226, 'isFollowing': False}, {'_id': '629a173153a72d997d3f57d0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg', 'fullname': 'Santiago Viquez', 'name': 'santiviquez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 86, 'isFollowing': False}]",/posts/santiviquez/229518612093214,5,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,270417770956024,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: A Chain-of-Thought Is as Strong as Its Weakest Link: A Benchmark for Verifiers of Reasoning Chains by "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: A Chain-of-Thought Is as Strong as Its Weakest Link: A Benchmark for Verifiers of Reasoning Chains by ""}, {'type': 'mention', 'user': 'alonjacovi', 'raw': '@alonjacovi'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'yonatanbitton', 'raw': '@yonatanbitton'}, {'type': 'text', 'value': ' B. Bohnet J. Herzig ', 'raw': ' B. Bohnet J. Herzig '}, {'type': 'mention', 'user': 'orhonovic', 'raw': '@orhonovic'}, {'type': 'text', 'value': ' M. Tseng M. Collins ', 'raw': ' M. Tseng M. Collins '}, {'type': 'mention', 'user': 'roeeaharoni', 'raw': '@roeeaharoni'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'mega', 'raw': '@mega'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This work introduces a new methodology for human verification of reasoning chains and adopts it to annotate a dataset of chain-of-thought reasoning chains produced by 3 LMs. The annotated dataset, REVEAL, can be used to benchmark automatic verifiers of reasoning in LMs.', 'raw': 'This work introduces a new methodology for human verification of reasoning chains and adopts it to annotate a dataset of chain-of-thought reasoning chains produced by 3 LMs. The annotated dataset, REVEAL, can be used to benchmark automatic verifiers of reasoning in LMs.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In their analysis, the authors find that LM-produced CoTs generally contain faulty steps, often leading to incorrect automatic verification. In particular, CoT-generating LMs are found to produce non-attributable reasoning steps often, and reasoning verifiers generally struggle to verify logical correctness.', 'raw': 'In their analysis, the authors find that LM-produced CoTs generally contain faulty steps, often leading to incorrect automatic verification. 
In particular, CoT-generating LMs are found to produce non-attributable reasoning steps often, and reasoning verifiers generally struggle to verify logical correctness.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2402.00559'}, 'url': 'https://huggingface.co/papers/2402.00559', 'raw': 'https://huggingface.co/papers/2402.00559', 'label': 'A Chain-of-Thought Is as Strong as Its Weakest Link: A Benchmark for\n Verifiers of Reasoning Chains (2402.00559)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔡 Dataset: ', 'raw': '🔡 Dataset: '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'google/reveal'}, 'url': 'https://huggingface.co/datasets/google/reveal', 'raw': 'https://huggingface.co/datasets/google/reveal'}]","🔍 Today's pick in Interpretability & Analysis of LMs: A Chain-of-Thought Is as Strong as Its Weakest Link: A Benchmark for Verifiers of Reasoning Chains by @alonjacovi @yonatanbitton B. Bohnet J. Herzig @orhonovic M. Tseng M. Collins @roeeaharoni @mega + +This work introduces a new methodology for human verification of reasoning chains and adopts it to annotate a dataset of chain-of-thought reasoning chains produced by 3 LMs. The annotated dataset, REVEAL, can be used to benchmark automatic verifiers of reasoning in LMs. + +In their analysis, the authors find that LM-produced CoTs generally contain faulty steps, often leading to incorrect automatic verification. In particular, CoT-generating LMs are found to produce non-attributable reasoning steps often, and reasoning verifiers generally struggle to verify logical correctness. 
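For anyone who wants to poke at the annotations themselves, a minimal sketch for pulling the benchmark with the datasets library; split and feature names are best checked by inspection, since they are not given in the post:

```python
from datasets import load_dataset

reveal = load_dataset("google/reveal")   # the REVEAL benchmark linked below
print(reveal)                            # inspect available splits and features
first_split = next(iter(reveal.values()))
print(first_split[0])                    # one annotated reasoning-chain example
```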
+ +📄 Paper: https://huggingface.co/papers/2402.00559 +🔡 Dataset: https://huggingface.co/datasets/google/reveal","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/QumntyDreWdVTISmKnrt_.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/7QE337VIajw_i31gzlT0t.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/I2llCUXoFPuhLArL8zOJo.png'}]","[{'_id': '6596c90dfe0f9826a63654d0', 'avatarUrl': '/avatars/e616fbdff1a345ad22081f5cd019329a.svg', 'fullname': 'Alon Jacovi', 'name': 'alonjacovi', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}, {'_id': '610b729f9da682cd54ad9adf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1628140189042-noauth.jpeg', 'fullname': 'Mor Geva', 'name': 'mega', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}, {'_id': '6363bf2b123a5d5cd4a8fe7c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1667756718733-6363bf2b123a5d5cd4a8fe7c.jpeg', 'fullname': 'Roee Aharoni', 'name': 'roeeaharoni', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 10}, {'_id': '632e0771ae0a7b1fc95630bf', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1663961181981-632e0771ae0a7b1fc95630bf.jpeg', 'fullname': 'Yonatan', 'name': 'yonatanbitton', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 5}]","[{'reaction': '🤗', 'users': ['s3nh', 'roeeaharoni', 'alonjacovi', 'osanseviero', 'clem', 'manu'], 'count': 6}, {'reaction': '👍', 'users': ['Yonatan-Bitton'], 'count': 1}]",2024-02-05 06:51:53,2024-02-05 06:51:53.350,[],/posts/gsarti/270417770956024,24,,0
https://cdn-avatars.huggingface.co/v1/production/uploads/1594192845975-5e1e17b6fcf41d740b6996a8.jpeg,211.0,Bram Vanroy,BramVanroy,679226771675158,"[{'type': 'text', 'value': '📣 DPO Dutch model release + datasets ', 'raw': '📣 DPO Dutch model release + datasets '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After teasing for a while, I am finally releasing **GEITje 7B Ultra**, building upon the great GEITje 7B by ', 'raw': 'After teasing for a while, I am finally releasing **GEITje 7B Ultra**, building upon the great GEITje 7B by '}, {'type': 'mention', 'user': 'Rijgersberg', 'raw': '@Rijgersberg'}, {'type': 'text', 'value': "". New contributions include: large new datasets for SFT (instruction/chat), two datasets for DPO training (i.e. RLAIF), and an SFT and DPO version of GEITje. The READMEs describe everything well (I hope), and I'll also share more info on social media tomorrow. "", 'raw': "". New contributions include: large new datasets for SFT (instruction/chat), two datasets for DPO training (i.e. RLAIF), and an SFT and DPO version of GEITje. The READMEs describe everything well (I hope), and I'll also share more info on social media tomorrow. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'For me this is a huge release, the datasets more so than the models. I\'m especially pleased with UltraChat, which I created with the intent of having a diverse dataset - the model must be able to communicate with different types of users.
So the user questions are created as if they were written by different personas, e.g. language learners, young children, experts, critics, etc. The focus with this is ""building a good communication bot that is accessible and can handle different kinds of user input"".', 'raw': 'For me this is a huge release, the datasets more so than the models. I\'m especially pleased with UltraChat, which I created with the intent of having a diverse dataset - the model must be able to communicate with different types of users. So the user questions are created as if they were written by different personas, e.g. language learners, young children, experts, critics, etc. The focus with this is ""building a good communication bot that is accessible and can handle different kinds of user input"".'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I wish I could find the time to also write a paper to get some ""academic recognition"" but that\'ll have to wait for now. I just want to bring it to the public so that others can play with it and use it to build new, cool stuff!', 'raw': 'I wish I could find the time to also write a paper to get some ""academic recognition"" but that\'ll have to wait for now. I just want to bring it to the public so that others can play with it and use it to build new, cool stuff!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I hope that you can all appreciate the work. Let's build some cool stuff with it!"", 'raw': ""I hope that you can all appreciate the work. Let's build some cool stuff with it!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Models:', 'raw': 'Models:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Demo: ', 'raw': '- Demo: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/BramVanroy/GEITje-7B-ultra', 'raw': 'https://huggingface.co/spaces/BramVanroy/GEITje-7B-ultra'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- DPO Model: ', 'raw': '- DPO Model: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'BramVanroy/GEITje-7B-ultra'}, 'url': 'https://huggingface.co/BramVanroy/GEITje-7B-ultra', 'raw': 'https://huggingface.co/BramVanroy/GEITje-7B-ultra'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- SFT model (not recommended): ', 'raw': '- SFT model (not recommended): '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'BramVanroy/GEITje-7B-ultra-sft'}, 'url': 'https://huggingface.co/BramVanroy/GEITje-7B-ultra-sft', 'raw': 'https://huggingface.co/BramVanroy/GEITje-7B-ultra-sft'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Datasets with GPT-4 turbo completions:', 'raw': 'Datasets with GPT-4 turbo completions:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - No robots (~10k instructions): ', 'raw': ' - No robots (~10k instructions): '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/no_robots_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/no_robots_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/no_robots_dutch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - UltraChat (~200k instructions): ', 'raw': ' - UltraChat (~200k instructions): '}, {'type': 'resource', 
'resource': {'type': 'dataset', 'id': 'BramVanroy/ultrachat_200k_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/ultrachat_200k_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/ultrachat_200k_dutch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - UltraFeedback (DPO with GPT4+GEITje chat, ~50k): ', 'raw': ' - UltraFeedback (DPO with GPT4+GEITje chat, ~50k): '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/ultra_feedback_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' - Orca DPO Pairs (DPO with GPT4+GEITje chat, ~10k): ', 'raw': ' - Orca DPO Pairs (DPO with GPT4+GEITje chat, ~10k): '}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'BramVanroy/orca_dpo_pairs_dutch'}, 'url': 'https://huggingface.co/datasets/BramVanroy/orca_dpo_pairs_dutch', 'raw': 'https://huggingface.co/datasets/BramVanroy/orca_dpo_pairs_dutch'}]","📣 DPO Dutch model release + datasets + +After teasing for a while, I am finally releasing **GEITje 7B Ultra**, building upon the great GEITje 7B by @Rijgersberg. New contributions include: large new datasets for SFT (instruction/chat), two datasets for DPO training (i.e. RLAIF), and an SFT and DPO version of GEITje. The READMEs describe everything well (I hope), and I'll also share more info on social media tomorrow. + +For me this is a huge release, the datasets more so than the models. I'm especially pleased with UltraChat, which I created with the intent of having a diverse dataset - the model must be able to communicate with different types of users. So the user questions are created as if they were written by different personas, e.g. language learners, young children, experts, critics, etc. The focus with this is ""building a good communication bot that is accessible and can handle different kinds of user input"". + +I wish I could find the time to also write a paper to get some ""academic recognition"" but that'll have to wait for now. I just want to bring it to the public so that others can play with it and use it to build new, cool stuff! + +I hope that you can all appreciate the work. Let's build some cool stuff with it!
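As a quick check, any of the datasets listed below loads directly with the datasets library, for example:

```python
from datasets import load_dataset

# One of the released SFT datasets; the others load the same way.
ultrachat_nl = load_dataset("BramVanroy/ultrachat_200k_dutch")
print(ultrachat_nl)  # inspect splits and features before training on it
```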
+ +Models: +- Demo: https://huggingface.co/spaces/BramVanroy/GEITje-7B-ultra +- DPO Model: https://huggingface.co/BramVanroy/GEITje-7B-ultra +- SFT model (not recommended): https://huggingface.co/BramVanroy/GEITje-7B-ultra-sft + +Datasets with GPT-4 turbo completions: + - No robots (~10k instructions): https://huggingface.co/datasets/BramVanroy/no_robots_dutch + - UltraChat (~200k instructions): https://huggingface.co/datasets/BramVanroy/ultrachat_200k_dutch + - UltraFeedback (DPO with GPT4+GEITje chat, ~50k): https://huggingface.co/datasets/BramVanroy/ultra_feedback_dutch + - Orca DPO Pairs (DPO with GPT4+GEITje chat, ~10k): https://huggingface.co/datasets/BramVanroy/orca_dpo_pairs_dutch",[],"[{'_id': '6319b164bc8f3b313f7a1db0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6319b164bc8f3b313f7a1db0/Hh0kuwsAnD2AOKdL6PpRs.png', 'fullname': 'Edwin Rijgersberg', 'name': 'Rijgersberg', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 55}]","[{'reaction': '❤️', 'users': ['samusenps', 'osanseviero', 'beomi', 'p208p2002', 'gsarti', 's3nh', 'taufiqdp', 'victor', 'Stopwolf', 'Robbert', 'jvdgoltz', 'seostar', 'clem', 'cast42', 'jvh', 'ajrogier', 'dragonkue'], 'count': 17}, {'reaction': '🤝', 'users': ['samusenps', 'osanseviero', 'gsarti', 'HuggyMonkey'], 'count': 4}]",2024-02-04 18:15:13,2024-02-05 06:42:56.083,"[{'_id': '6319b164bc8f3b313f7a1db0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6319b164bc8f3b313f7a1db0/Hh0kuwsAnD2AOKdL6PpRs.png', 'fullname': 'Edwin Rijgersberg', 'name': 'Rijgersberg', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 55, 'isFollowing': False}, {'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}, {'_id': '5e7749883d77a72421292d07', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg', 'fullname': 'Gabriele Sarti', 'name': 'gsarti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 226, 'isFollowing': False}]",/posts/BramVanroy/679226771675158,141,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6401c8c9f98fbc64bcd7dca1/MOSgc_mPbfUZ-354osy1v.png,241.0,FBL,fblgit,526943645565773,"[{'type': 'text', 'value': 'Introducing model-similarities, a new simple tool to contrast two models', 'raw': 'Introducing model-similarities, a new simple tool to contrast two models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A straightforward yet insightful tool designed to shed light on the similarities between various models. Discover it now at [Model Similarity GitHub Repository](', 'raw': 'A straightforward yet insightful tool designed to shed light on the similarities between various models. 
Discover it now at [Model Similarity GitHub Repository]('}, {'type': 'link', 'href': 'https://github.com/fblgit/model-similarity', 'raw': 'https://github.com/fblgit/model-similarity'}, {'type': 'text', 'value': ').', 'raw': ').'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This project is in its nascent stages, and we're eager for contributions and enhancements. Crafted with simplicity at its core, the tool performs two primary comparisons:"", 'raw': ""This project is in its nascent stages, and we're eager for contributions and enhancements. Crafted with simplicity at its core, the tool performs two primary comparisons:""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Weight similarities, utilizing a simple approach to contrast vector differences (A != B).', 'raw': '- Weight similarities, utilizing a simple approach to contrast vector differences (A != B).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '- Cosine similarity between the parameters of models A and B, providing a nuanced measure of their alignment.', 'raw': '- Cosine similarity between the parameters of models A and B, providing a nuanced measure of their alignment.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Included in the repository are sample analyses and reports that validate model card claims, particularly regarding the training specifics of transformer components such as MLP, Attention, etc. Remarkably, these samples reveal 100% similarity scores between those parts of the models, pinpointing the exact base model utilized.', 'raw': 'Included in the repository are sample analyses and reports that validate model card claims, particularly regarding the training specifics of transformer components such as MLP, Attention, etc. Remarkably, these samples reveal 100% similarity scores between those parts of the models, pinpointing the exact base model utilized.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Join us in refining and expanding this tool. Whether you're looking to contribute code, ideas, or both, your input will help transform this into a resource for everyone."", 'raw': ""Join us in refining and expanding this tool. Whether you're looking to contribute code, ideas, or both, your input will help transform this into a resource for everyone.""}]","Introducing model-similarities, a new simple tool to contrast two models + +A straightforward yet insightful tool designed to shed light on the similarities between various models. Discover it now at [Model Similarity GitHub Repository](https://github.com/fblgit/model-similarity). + +This project is in its nascent stages, and we're eager for contributions and enhancements. Crafted with simplicity at its core, the tool performs two primary comparisons: +- Weight similarities, utilizing a simple approach to contrast vector differences (A != B). +- Cosine similarity between the parameters of models A and B, providing a nuanced measure of their alignment. + +Included in the repository are sample analyses and reports that validate model card claims, particularly regarding the training specifics of transformer components such as MLP, Attention, etc. Remarkably, these samples reveal 100% similarity scores between those parts of the models, pinpointing the exact base model utilized. + +Join us in refining and expanding this tool. 
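As a rough illustration of the two comparisons described above; the model ids below are placeholders, and both checkpoints must share the same architecture so that their parameters line up one-to-one:

```python
import torch
import torch.nn.functional as F
from transformers import AutoModelForCausalLM

# Placeholder ids; swap in any two architecturally identical checkpoints.
model_a = AutoModelForCausalLM.from_pretrained("org/model-a")
model_b = AutoModelForCausalLM.from_pretrained("org/model-b")

for (name, pa), (_, pb) in zip(model_a.named_parameters(),
                               model_b.named_parameters()):
    changed = (pa != pb).float().mean().item()  # fraction of differing weights
    cosine = F.cosine_similarity(pa.flatten(), pb.flatten(), dim=0).item()
    print(f"{name}: changed={changed:.2%} cosine={cosine:.4f}")
# Modules reporting ~100% cosine similarity (and 0% changed weights) were
# most likely carried over untouched from the same base model.
```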
Whether you're looking to contribute code, ideas, or both, your input will help transform this into a resource for everyone.",[],[],"[{'reaction': '❤️', 'users': ['fblgit', 'shivamanbhule', 'thomasgauthier', 'osanseviero', 'distantquant', 'victor', 'samusenps', 'shuvom', 'mohammedbriman', 'santiviquez', 'clem', 'alielfilali01'], 'count': 12}, {'reaction': '👍', 'users': ['samusenps', 'clem'], 'count': 2}]",2024-02-04 08:31:55,2024-02-04 08:31:55.454,[],/posts/fblgit/526943645565773,188,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/61868ce808aae0b5499a2a95/F6BA0anbsoY_Z7M1JrwOe.jpeg,6819.0,Sylvain Filoni,fffiloni,517233644187784,"[{'type': 'text', 'value': 'Quick build of the day: LCM Supa Fast Image Variation', 'raw': 'Quick build of the day: LCM Supa Fast Image Variation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '—', 'raw': '—'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We take the opportunity to combine moondream1 vision and LCM SDXL fast abilities to generate a variation from the subject of the image input. ', 'raw': 'We take the opportunity to combine moondream1 vision and LCM SDXL fast abilities to generate a variation from the subject of the image input. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All that thanks to gradio APIs 🤗', 'raw': 'All that thanks to gradio APIs 🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Try the space: ', 'raw': 'Try the space: '}, {'type': 'link', 'href': 'https://huggingface.co/spaces/fffiloni/lcm-img-variations', 'raw': 'https://huggingface.co/spaces/fffiloni/lcm-img-variations'}]","Quick build of the day: LCM Supa Fast Image Variation +— +We take the opportunity to combine moondream1 vision and LCM SDXL fast abilities to generate a variation from the subject of the image input. 
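A hedged sketch of what that chaining could look like with the gradio_client library; the Space ids, endpoint names, and argument orders here are assumptions, and calling view_api() on each client prints the real signatures:

```python
from gradio_client import Client

# Step 1: ask a vision Space to describe the subject of the input image.
# Space id and endpoint name are assumptions; check vision.view_api().
vision = Client("vikhyatk/moondream1")
caption = vision.predict(
    "input.jpg", "Describe the main subject of this image.",
    api_name="/answer_question",
)

# Step 2: feed the caption to a fast LCM SDXL Space as the text prompt.
lcm = Client("latent-consistency/lcm-lora-for-sdxl")  # hypothetical Space id
variation = lcm.predict(caption, api_name="/predict")
print(variation)  # path to the generated image variation
```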
+All that thanks to gradio APIs 🤗 + +Try the space: https://huggingface.co/spaces/fffiloni/lcm-img-variations","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/GtonkxIsjDTtcY6Y1Kcy3.mp4'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/61868ce808aae0b5499a2a95/GKAaHxr542w4sm54NKW19.png'}]",[],"[{'reaction': '❤️', 'users': ['samusenps', 'victor', 'clem', 'osanseviero', 'designfailure', 'DamarJati', 'merve'], 'count': 7}, {'reaction': '🤝', 'users': ['designfailure', 'merve'], 'count': 2}, {'reaction': '👍', 'users': ['designfailure', 'merve'], 'count': 2}, {'reaction': '🤯', 'users': ['designfailure', 'merve'], 'count': 2}]",2024-01-30 15:28:53,2024-01-30 21:14:19.747,"[{'_id': '5f17f0a0925b9863e28ad517', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5f17f0a0925b9863e28ad517/fXIY5i9RLsIa1v3CCuVtt.jpeg', 'fullname': 'Victor Mustar', 'name': 'victor', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3949, 'isFollowing': False}, {'_id': '6444dc30a56444c355d63a3c', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6444dc30a56444c355d63a3c/qpWjihAsfQX94MPRw2Enq.jpeg', 'fullname': 'fAleš', 'name': 'designfailure', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 8, 'isFollowing': False}, {'_id': '61868ce808aae0b5499a2a95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61868ce808aae0b5499a2a95/F6BA0anbsoY_Z7M1JrwOe.jpeg', 'fullname': 'Sylvain Filoni', 'name': 'fffiloni', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6819, 'isFollowing': False}]",/posts/fffiloni/517233644187784,206,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,866574398272311,"[{'type': 'text', 'value': 'InternLM-XComposer2', 'raw': 'InternLM-XComposer2'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mastering Free-form Text-Image Composition and Comprehension in Vision-Language Large Model', 'raw': 'Mastering Free-form Text-Image Composition and Comprehension in Vision-Language Large Model'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'paper page: ', 'raw': 'paper page: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.16420'}, 'url': 'https://huggingface.co/papers/2401.16420', 'raw': 'https://huggingface.co/papers/2401.16420', 'label': 'InternLM-XComposer2: Mastering Free-form Text-Image Composition and\n Comprehension in Vision-Language Large Model (2401.16420)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Experimental results demonstrate the superiority of InternLM-XComposer2 based on InternLM2-7B in producing high-quality long-text multi-modal content and its exceptional vision-language understanding performance across various benchmarks, where it not only significantly outperforms existing multimodal models but also matches or even surpasses GPT-4V and Gemini Pro in certain assessments.', 'raw': 'Experimental results demonstrate the superiority of InternLM-XComposer2 based on InternLM2-7B in producing high-quality long-text multi-modal content and its exceptional vision-language understanding performance across various 
benchmarks, where it not only significantly outperforms existing multimodal models but also matches or even surpasses GPT-4V and Gemini Pro in certain assessments.'}]","InternLM-XComposer2 + +Mastering Free-form Text-Image Composition and Comprehension in Vision-Language Large Model + +paper page: https://huggingface.co/papers/2401.16420 + +Experimental results demonstrate the superiority of InternLM-XComposer2 based on InternLM2-7B in producing high-quality long-text multi-modal content and its exceptional vision-language understanding performance across various benchmarks, where it not only significantly outperforms existing multimodal models but also matches or even surpasses GPT-4V and Gemini Pro in certain assessments.","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/LSW25B_vGC8ox8cw_u7GT.mp4'}]",[],"[{'reaction': '❤️', 'users': ['clem', 'osanseviero', 'ziozzang', 'merve'], 'count': 4}, {'reaction': '🤝', 'users': ['HuggyMonkey', 'clem', 'merve'], 'count': 3}]",2024-01-30 15:04:25,2024-01-30 15:04:25.565,[],/posts/akhaliq/866574398272311,31,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,766450938511715,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Black-Box Access is Insufficient for Rigorous AI Audits by "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Black-Box Access is Insufficient for Rigorous AI Audits by ""}, {'type': 'mention', 'user': 'stecas', 'raw': '@stecas'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'carsonezell', 'raw': '@carsonezell'}, {'type': 'text', 'value': ' et al.', 'raw': ' et al.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Audits conducted on AI systems can identify potential risks and ensure their compliance to safety requirements. Authors categorise audits based on the access to model-related resources (black, grey, white and out-of-the box) and highlight how levels of transparency on audited AI system enable broader and more effective auditing procedures. Technical, physical, and legal safeguards for performing audits are also introduced to ensure minimal security risks for audited companies. Authors conclude that transparency on the type of auditors’ access and methods is a pre-requisite to correctly interpret audit results, and white- and outside-the-box access allow for substantially more scrutiny than black-box access alone.', 'raw': 'Audits conducted on AI systems can identify potential risks and ensure their compliance to safety requirements. Authors categorise audits based on the access to model-related resources (black, grey, white and out-of-the box) and highlight how levels of transparency on audited AI system enable broader and more effective auditing procedures. Technical, physical, and legal safeguards for performing audits are also introduced to ensure minimal security risks for audited companies. 
Authors conclude that transparency on the type of auditors’ access and methods is a pre-requisite to correctly interpret audit results, and white- and outside-the-box access allow for substantially more scrutiny than black-box access alone.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.14446'}, 'url': 'https://huggingface.co/papers/2401.14446', 'raw': 'https://huggingface.co/papers/2401.14446', 'label': 'Black-Box Access is Insufficient for Rigorous AI Audits (2401.14446)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Further readings:', 'raw': '🔍 Further readings:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄Taxonomy of AI system access: ', 'raw': '📄Taxonomy of AI system access: '}, {'type': 'link', 'href': 'https://bit.ly/struct-access', 'raw': 'https://bit.ly/struct-access'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💻An API for transparent science on Black-box AI (NNsight): ', 'raw': '💻An API for transparent science on Black-box AI (NNsight): '}, {'type': 'link', 'href': 'https://nnsight.net/about', 'raw': 'https://nnsight.net/about'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Black-Box Access is Insufficient for Rigorous AI Audits by @stecas @carsonezell et al. + +Audits conducted on AI systems can identify potential risks and ensure their compliance to safety requirements. Authors categorise audits based on the access to model-related resources (black, grey, white and out-of-the box) and highlight how levels of transparency on audited AI system enable broader and more effective auditing procedures. Technical, physical, and legal safeguards for performing audits are also introduced to ensure minimal security risks for audited companies. Authors conclude that transparency on the type of auditors’ access and methods is a pre-requisite to correctly interpret audit results, and white- and outside-the-box access allow for substantially more scrutiny than black-box access alone. 
+ +📄 Paper: https://huggingface.co/papers/2401.14446 + +🔍 Further readings: + +📄Taxonomy of AI system access: https://bit.ly/struct-access +💻An API for transparent science on Black-box AI (NNsight): https://nnsight.net/about","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/85WJxBAursM4ZKJ6N_Wmg.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/aEC_ns75K8Cp2qVoa7XK-.png'}]","[{'_id': '64bc159138953777fe632c40', 'avatarUrl': '/avatars/a4a8c2281402a3c9955b52be99ad4656.svg', 'fullname': 'Carson Ezell', 'name': 'carsonezell', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False}, {'_id': '6466a046326128fd2c6c59c2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6466a046326128fd2c6c59c2/kYwPcxupelOBvKFB0y8Me.png', 'fullname': 'Stephen Casper', 'name': 'stecas', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}]","[{'reaction': '❤️', 'users': ['clem', 'skrishna', 'osanseviero', 'merve', 'stecas'], 'count': 5}, {'reaction': '👍', 'users': ['dhuynh95', 'skrishna', 'osanseviero', 'stecas'], 'count': 4}]",2024-01-30 10:02:07,2024-01-30 10:02:19.676,[],/posts/gsarti/766450938511715,9,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png,883.0,Abubakar Abid,abidlabs,744389946746370,"[{'type': 'text', 'value': ""The most interesting LLM benchmark I've seen so far... reminder that there's lots of characterization of LLMs still yet to do."", 'raw': ""The most interesting LLM benchmark I've seen so far... reminder that there's lots of characterization of LLMs still yet to do.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2312.06281'}, 'url': 'https://huggingface.co/papers/2312.06281', 'raw': 'https://huggingface.co/papers/2312.06281', 'label': 'EQ-Bench: An Emotional Intelligence Benchmark for Large Language Models (2312.06281)'}]","The most interesting LLM benchmark I've seen so far... reminder that there's lots of characterization of LLMs still yet to do. 
+ +https://huggingface.co/papers/2312.06281","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/608b8bb39d7c9519b4adae19/2IqjU7FhRuv3e1v_Zqd1K.png'}]",[],"[{'reaction': '👍', 'users': ['bradwray', 'wuyinansooymilk', 'osanseviero', 'clem', 'doraemom0812'], 'count': 5}]",2024-01-30 04:25:00,2024-01-30 04:25:14.652,[],/posts/abidlabs/744389946746370,22,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1661497922734-62f4ac43567dbf9a39f75474.jpeg,79.0,Daniel Huynh,dhuynh95,549309663763755,"[{'type': 'text', 'value': 'Fascinating paper by Rand shows that there is no statistically significant difference between using LLMs or regular internet to craft operational plans for bioweapons!', 'raw': 'Fascinating paper by Rand shows that there is no statistically significant difference between using LLMs or regular internet to craft operational plans for bioweapons!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is the first paper that actually studies the impact of AI on bioweapons from an operational perspective and looks at the big question: is AI any better than just using public data on the Internet?', 'raw': 'This is the first paper that actually studies the impact of AI on bioweapons from an operational perspective and looks at the big question: is AI any better than just using public data on the Internet?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'As most of the data is most likely out there, an LLM would just be a more efficient tool to come up with the relevant information, but it seems that its impact is limited.', 'raw': 'As most of the data is most likely out there, an LLM would just be a more efficient tool to come up with the relevant information, but it seems that its impact is limited.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://www.rand.org/pubs/research_reports/RRA2977-2.html', 'raw': 'https://www.rand.org/pubs/research_reports/RRA2977-2.html'}, {'type': 'text', 'value': ' ', 'raw': ' '}]","Fascinating paper by Rand shows that there is no statistically significant difference between using LLMs or regular internet to craft operational plans for bioweapons! + +This is the first paper that actually studies the impact of AI on bioweapons from an operational perspective and looks at the big question: is AI any better than just using public data on the Internet? + +As most of the data is most likely out there, an LLM would just be a more efficient tool to come up with the relevant information, but it seems that its impact is limited. 
+ +https://www.rand.org/pubs/research_reports/RRA2977-2.html ",[],[],"[{'reaction': '👍', 'users': ['osanseviero', 'samusenps', 'clem', 'Dlbk', 'rreed-pha', 'merve'], 'count': 6}, {'reaction': '🤯', 'users': ['clem', 'SivilTaram', 'omaryshchenko'], 'count': 3}]",2024-01-29 20:45:04,2024-01-31 02:19:16.336,"[{'_id': '63a278c3f30c4642278d4259', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63a278c3f30c4642278d4259/B703mZ-yrcgYU-WZCFJGy.jpeg', 'fullname': 'Celso F', 'name': 'celsowm', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 27, 'isFollowing': False}]",/posts/dhuynh95/549309663763755,16,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,324429206613343,"[{'type': 'text', 'value': 'SliceGPT: Compress Large Language Models by Deleting Rows and Columns', 'raw': 'SliceGPT: Compress Large Language Models by Deleting Rows and Columns'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.15024'}, 'url': 'https://huggingface.co/papers/2401.15024', 'raw': 'https://huggingface.co/papers/2401.15024', 'label': 'SliceGPT: Compress Large Language Models by Deleting Rows and Columns (2401.15024)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Large language models have become the cornerstone of natural language processing, but their use comes with substantial costs in terms of compute and memory resources. Sparsification provides a solution to alleviate these resource constraints, and recent works have shown that trained models can be sparsified post-hoc. Existing sparsification techniques face challenges as they need additional data structures and offer constrained speedup with current hardware. In this paper we present SliceGPT, a new post-training sparsification scheme which replaces each weight matrix with a smaller (dense) matrix, reducing the embedding dimension of the network. Through extensive experimentation, we show that SliceGPT can remove up to 25% of the model parameters (including embeddings) for LLAMA2-70B, OPT 66B and Phi-2 models while maintaining 99%, 99% and 90% zero-shot task performance of the dense model respectively. Our sliced models run on fewer GPUs and run faster without any additional code optimization: on 24GB consumer GPUs we reduce the total compute for inference on LLAMA2-70B to 64% of that of the dense model; on 40GB A100 GPUs we reduce it to 66%. We offer a new insight, computational invariance in transformer networks, which enables SliceGPT and we hope it will inspire and enable future avenues to reduce memory and computation demands for pre-trained models.', 'raw': 'Large language models have become the cornerstone of natural language processing, but their use comes with substantial costs in terms of compute and memory resources. Sparsification provides a solution to alleviate these resource constraints, and recent works have shown that trained models can be sparsified post-hoc. Existing sparsification techniques face challenges as they need additional data structures and offer constrained speedup with current hardware. In this paper we present SliceGPT, a new post-training sparsification scheme which replaces each weight matrix with a smaller (dense) matrix, reducing the embedding dimension of the network. 
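A toy numerical illustration of the slicing idea (not the paper's full method: the rotation below is a random orthogonal matrix rather than the PCA-style transformation SliceGPT computes):

```python
import torch

d, k = 512, 384                  # original and sliced width, a 25% cut
W = torch.randn(d, d)            # stand-in for one transformer weight matrix
Q, _ = torch.linalg.qr(torch.randn(d, d))  # some orthogonal transformation

# In the paper, applying such rotations consistently across the network is
# computationally invariant, i.e. it leaves the model's function unchanged.
W_rot = Q.T @ W @ Q
W_sliced = W_rot[:k, :k]         # delete trailing rows and columns
print(tuple(W.shape), "->", tuple(W_sliced.shape))  # (512, 512) -> (384, 384)
```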
Through extensive experimentation, we show that SliceGPT can remove up to 25% of the model parameters (including embeddings) for LLAMA2-70B, OPT 66B and Phi-2 models while maintaining 99%, 99% and 90% zero-shot task performance of the dense model respectively. Our sliced models run on fewer GPUs and run faster without any additional code optimization: on 24GB consumer GPUs we reduce the total compute for inference on LLAMA2-70B to 64% of that of the dense model; on 40GB A100 GPUs we reduce it to 66%. We offer a new insight, computational invariance in transformer networks, which enables SliceGPT and we hope it will inspire and enable future avenues to reduce memory and computation demands for pre-trained models.'}]","SliceGPT: Compress Large Language Models by Deleting Rows and Columns + +https://huggingface.co/papers/2401.15024 + +Large language models have become the cornerstone of natural language processing, but their use comes with substantial costs in terms of compute and memory resources. Sparsification provides a solution to alleviate these resource constraints, and recent works have shown that trained models can be sparsified post-hoc. Existing sparsification techniques face challenges as they need additional data structures and offer constrained speedup with current hardware. In this paper we present SliceGPT, a new post-training sparsification scheme which replaces each weight matrix with a smaller (dense) matrix, reducing the embedding dimension of the network. Through extensive experimentation, we show that SliceGPT can remove up to 25% of the model parameters (including embeddings) for LLAMA2-70B, OPT 66B and Phi-2 models while maintaining 99%, 99% and 90% zero-shot task performance of the dense model respectively. Our sliced models run on fewer GPUs and run faster without any additional code optimization: on 24GB consumer GPUs we reduce the total compute for inference on LLAMA2-70B to 64% of that of the dense model; on 40GB A100 GPUs we reduce it to 66%. We offer a new insight, computational invariance in transformer networks, which enables SliceGPT and we hope it will inspire and enable future avenues to reduce memory and computation demands for pre-trained models.","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/js8LgZSyDmTlzlmLoGi4Y.png'}]",[],"[{'reaction': '👍', 'users': ['osanseviero', 'vladbogo', 'taufiqdp', 'victor', 'CHianogoi', 'omaryshchenko', 'AdinaY', 'merve', 'Locutusque', 'Ji-Ha', 'Saleh'], 'count': 11}]",2024-01-29 17:05:36,2024-01-29 17:05:36.112,[],/posts/akhaliq/324429206613343,30,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,954032654837367,"[{'type': 'text', 'value': ""With the Google announcement last week, I think we're now officially the only AI startup out there who has commercial collaborations with all the major cloud providers (AWS, GCP, Azure) and hardware providers (Nvidia, AMD, Intel, Qualcomm,...), making our vision of being the independent and agnostic platform for all AI builders truer than ever! "", 'raw': ""With the Google announcement last week, I think we're now officially the only AI startup out there who has commercial collaborations with all the major cloud providers (AWS, GCP, Azure) and hardware providers (Nvidia, AMD, Intel, Qualcomm,...), making our vision of being the independent and agnostic platform for all AI builders truer than ever! 
""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Let's go!"", 'raw': ""Let's go!""}]","With the Google announcement last week, I think we're now officially the only AI startup out there who has commercial collaborations with all the major cloud providers (AWS, GCP, Azure) and hardware providers (Nvidia, AMD, Intel, Qualcomm,...), making our vision of being the independent and agnostic platform for all AI builders truer than ever! + +Let's go!",[],[],"[{'reaction': '❤️', 'users': ['on1onmangoes', 'osanseviero', 'santiviquez', 'mvaloatto', 'abbasm2', 'donbale', 'gsarti', 'vladbogo', 'MadElf1337', 'SivilTaram', 'taufiqdp', 'rreed-pha', 'victor', 'neovalle', 'qnixsynapse', 'andrewrreed', 'vivekrp', 'merve', 'joaogante', 'Bondstreet', 'LucasThil', 'maywell', 'duraad', 'hysts', 'thibaultM'], 'count': 25}]",2024-01-29 16:36:17,2024-01-29 16:36:17.971,[],/posts/clem/954032654837367,35,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg,86.0,Santiago Viquez,santiviquez,138523411387083,"[{'type': 'text', 'value': 'Confidence * may be * all you need.', 'raw': 'Confidence * may be * all you need.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'A simple average of the log probabilities of the output tokens from an LLM might be all it takes to tell if the model is hallucinating.\U0001fae8', 'raw': 'A simple average of the log probabilities of the output tokens from an LLM might be all it takes to tell if the model is hallucinating.\U0001fae8'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The idea is that if a model is not confident (low output token probabilities), the model may be inventing random stuff.', 'raw': 'The idea is that if a model is not confident (low output token probabilities), the model may be inventing random stuff.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'In these two papers:', 'raw': 'In these two papers:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. ', 'raw': '1. '}, {'type': 'link', 'href': 'https://aclanthology.org/2023.eacl-main.75/', 'raw': 'https://aclanthology.org/2023.eacl-main.75/'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. ', 'raw': '2. '}, {'type': 'link', 'href': 'https://arxiv.org/abs/2303.08896', 'raw': 'https://arxiv.org/abs/2303.08896'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The authors claim that this simple method is the best heuristic for detecting hallucinations. The beauty is that it only uses the generated token probabilities, so it can be implemented at inference time ⚡', 'raw': 'The authors claim that this simple method is the best heuristic for detecting hallucinations. The beauty is that it only uses the generated token probabilities, so it can be implemented at inference time ⚡'}]","Confidence * may be * all you need. + +A simple average of the log probabilities of the output tokens from an LLM might be all it takes to tell if the model is hallucinating.🫨 + +The idea is that if a model is not confident (low output token probabilities), the model may be inventing random stuff. + +In these two papers: +1. https://aclanthology.org/2023.eacl-main.75/ +2. https://arxiv.org/abs/2303.08896 + +The authors claim that this simple method is the best heuristic for detecting hallucinations. 
The beauty is that it only uses the generated token probabilities, so it can be implemented at inference time ⚡","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/629a173153a72d997d3f57d0/ut81ozACPUSrqyDmmuEiq.jpeg'}]",[],"[{'reaction': '❤️', 'users': ['clem', 'leegao19', 'osanseviero', 'victor', 's3nh', 'abbasm2', 'gsarti', 'rreed-pha', 'stolsvik'], 'count': 9}]",2024-01-29 16:08:58,2024-01-30 11:10:49.141,"[{'_id': '62f4ac43567dbf9a39f75474', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1661497922734-62f4ac43567dbf9a39f75474.jpeg', 'fullname': 'Daniel Huynh', 'name': 'dhuynh95', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 79, 'isFollowing': False}, {'_id': '629a173153a72d997d3f57d0', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1657144463525-629a173153a72d997d3f57d0.jpeg', 'fullname': 'Santiago Viquez', 'name': 'santiviquez', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 86, 'isFollowing': False}, {'_id': '5e7749883d77a72421292d07', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg', 'fullname': 'Gabriele Sarti', 'name': 'gsarti', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 226, 'isFollowing': False}]",/posts/santiviquez/138523411387083,56,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,522955934800613,"[{'type': 'text', 'value': 'TURNA: the biggest Turkish encoder-decoder model up-to-date, based on UL2 architecture, comes in 1.1B params 🐦 😍 ', 'raw': 'TURNA: the biggest Turkish encoder-decoder model up-to-date, based on UL2 architecture, comes in 1.1B params 🐦 😍 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The researchers also released models fine-tuned on various downstream tasks including text categorization, NER, summarization and more! 🤯 Great models ', 'raw': 'The researchers also released models fine-tuned on various downstream tasks including text categorization, NER, summarization and more! 
🤯 Great models '}, {'type': 'mention', 'user': 'onurgu', 'raw': '@onurgu'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'gokceuludogan', 'raw': '@gokceuludogan'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'yirmibesogluz', 'raw': '@yirmibesogluz'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'furkanakkurt1618', 'raw': '@furkanakkurt1618'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'uskudarli', 'raw': '@uskudarli'}, {'type': 'text', 'value': ' 👏 ', 'raw': ' 👏 '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Fine-tuned models are in this collection 👉 ', 'raw': 'Fine-tuned models are in this collection 👉 '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'boun-tabi-LMG/turna-ft-65b3f20aff5235e6cad07c1b'}, 'url': 'https://huggingface.co/collections/boun-tabi-LMG/turna-ft-65b3f20aff5235e6cad07c1b', 'raw': 'https://huggingface.co/collections/boun-tabi-LMG/turna-ft-65b3f20aff5235e6cad07c1b'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Pre-trained models are in this collection 👉 ', 'raw': 'Pre-trained models are in this collection 👉 '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'boun-tabi-LMG/turna-65ad340e5df673eec66e48c7'}, 'url': 'https://huggingface.co/collections/boun-tabi-LMG/turna-65ad340e5df673eec66e48c7', 'raw': 'https://huggingface.co/collections/boun-tabi-LMG/turna-65ad340e5df673eec66e48c7'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}]","TURNA: the biggest Turkish encoder-decoder model up-to-date, based on UL2 architecture, comes in 1.1B params 🐦 😍 +The researchers also released models fine-tuned on various downstream tasks including text categorization, NER, summarization and more! 
🤯 Great models @onurgu @gokceuludogan @yirmibesogluz @furkanakkurt1618 @uskudarli 👏 +Fine-tuned models are in this collection 👉 https://huggingface.co/collections/boun-tabi-LMG/turna-ft-65b3f20aff5235e6cad07c1b +Pre-trained models are in this collection 👉 https://huggingface.co/collections/boun-tabi-LMG/turna-65ad340e5df673eec66e48c7 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/eJkT6crvMe_mIIea2mXSa.png'}]","[{'_id': '62aae09df29ff279b510edb5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62aae09df29ff279b510edb5/yrVWyaP8v7xOmUpP9xOew.jpeg', 'fullname': 'Furkan Akkurt', 'name': 'furkanakkurt1618', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}, {'_id': '6103d0c1161356e7045b94da', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1660393141770-6103d0c1161356e7045b94da.png', 'fullname': 'Gökçe Uludoğan', 'name': 'gokceuludogan', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12}, {'_id': '62bdf8c8df69b47307cd9609', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62bdf8c8df69b47307cd9609/G9p6iwLm4KnRyky8oKber.jpeg', 'fullname': 'Onur Güngör', 'name': 'onurgu', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 20}, {'_id': '5f21d64e2128ba01bdf08ba9', 'avatarUrl': '/avatars/1c4743ce1a9ed8e5a99cfa39531fa343.svg', 'fullname': 'S Uskudarli', 'name': 'uskudarli', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3}, {'_id': '630b476079d18d5e53e30f09', 'avatarUrl': '/avatars/99a163fd4a797a1621146d6f2188cc1f.svg', 'fullname': 'Zeynep Yirmibeşoğlu', 'name': 'yirmibesogluz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2}]","[{'reaction': '👍', 'users': ['clefourrier', 'mertyanik', 'clem', 'osanseviero', 'gokceuludogan', 'ridvanpal'], 'count': 6}, {'reaction': '❤️', 'users': ['osanseviero', 's3nh', 'ktdilankaya', 'ismaildlml', 'ridvanpal'], 'count': 5}]",2024-01-29 15:02:35,2024-01-29 15:02:35.004,[],/posts/merve/522955934800613,53,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png,2266.0,Tom Aarsen,tomaarsen,792237167736897,"[{'type': 'text', 'value': 'Sentence Transformers v2.3.0 has been released! It includes several bug fixes, enhanced model loading including custom models & no more unnecessary file downloads, improved performance, a powerful loss function, and much more!', 'raw': 'Sentence Transformers v2.3.0 has been released! 
It includes several bug fixes, enhanced model loading including custom models & no more unnecessary file downloads, improved performance, a powerful loss function, and much more!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Details:', 'raw': 'Details:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⬆ Uploading Models to the Hub with ', 'raw': '⬆ Uploading Models to the Hub with '}, {'type': 'inline_code', 'code': 'save_to_hub', 'raw': '`save_to_hub`'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⬇ Downloading Models from the Hub now downloads only necessary files.', 'raw': '⬇ Downloading Models from the Hub now downloads only necessary files.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚙ Custom Models (such as ', 'raw': '⚙ Custom Models (such as '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'jinaai/jina-embeddings-v2-base-de'}, 'url': 'https://huggingface.co/jinaai/jina-embeddings-v2-base-de', 'raw': 'https://huggingface.co/jinaai/jina-embeddings-v2-base-de'}, {'type': 'text', 'value': ') can now be loaded with ', 'raw': ') can now be loaded with '}, {'type': 'inline_code', 'code': 'trust_remote_code=True', 'raw': '`trust_remote_code=True`'}, {'type': 'text', 'value': '.', 'raw': '.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🔍 Models can now be loaded at specific revisions (e.g. commit hashes or git branches).', 'raw': '🔍 Models can now be loaded at specific revisions (e.g. commit hashes or git branches).'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🖥️ Various device fixes; models will now always operate on the device that you specify.', 'raw': '🖥️ Various device fixes; models will now always operate on the device that you specify.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📉 A new ""Cached"" variant of the powerful Multiple Negatives Ranking Loss allows common hardware to reach performance previously only accessible on multi-gpu clusters.', 'raw': '📉 A new ""Cached"" variant of the powerful Multiple Negatives Ranking Loss allows common hardware to reach performance previously only accessible on multi-gpu clusters.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🐎 Computation time of Community Detection was decreased significantly (7x speedup at 500k sentences :exploding_head:)', 'raw': '🐎 Computation time of Community Detection was decreased significantly (7x speedup at 500k sentences :exploding_head:)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '🪶 Removed the now unnecessary ""torchvision"" dependency for a smaller installation.', 'raw': '🪶 Removed the now unnecessary ""torchvision"" dependency for a smaller installation.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Check out the full changelog here: ', 'raw': 'Check out the full changelog here: '}, {'type': 'link', 'href': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v2.3.0', 'raw': 'https://github.com/UKPLab/sentence-transformers/releases/tag/v2.3.0'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'll be working on much more changes in the near future, so expect more exciting updates. 
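For reference, the two new loading options above look roughly like this (model ids are just examples):

```python
from sentence_transformers import SentenceTransformer

# Custom architectures now load with trust_remote_code=True
model = SentenceTransformer("jinaai/jina-embeddings-v2-base-de",
                            trust_remote_code=True)

# Pin a model to a specific revision (branch name or commit hash)
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2",
                            revision="main")

embeddings = model.encode(["Sentence Transformers v2.3.0 is out!"])
print(embeddings.shape)  # (1, 384)
```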
If you encounter any issues, or have any questions or feature requests, don't hesitate to open an issue on the repository: "", 'raw': ""I'll be working on much more changes in the near future, so expect more exciting updates. If you encounter any issues, or have any questions or feature requests, don't hesitate to open an issue on the repository: ""}, {'type': 'link', 'href': 'https://github.com/UKPLab/sentence-transformers/issues', 'raw': 'https://github.com/UKPLab/sentence-transformers/issues'}]","Sentence Transformers v2.3.0 has been released! It includes several bug fixes, enhanced model loading including custom models & no more unnecessary file downloads, improved performance, a powerful loss function, and much more! + +Details: +⬆ Uploading Models to the Hub with `save_to_hub`. +⬇ Downloading Models from the Hub now downloads only necessary files. +⚙ Custom Models (such as https://huggingface.co/jinaai/jina-embeddings-v2-base-de) can now be loaded with `trust_remote_code=True`. +🔍 Models can now be loaded at specific revisions (e.g. commit hashes or git branches). +🖥️ Various device fixes; models will now always operate on the device that you specify. +📉 A new ""Cached"" variant of the powerful Multiple Negatives Ranking Loss allows common hardware to reach performance previously only accessible on multi-gpu clusters. +🐎 Computation time of Community Detection was decreased significantly (7x speedup at 500k sentences :exploding_head:) +🪶 Removed the now unnecessary ""torchvision"" dependency for a smaller installation. + +Check out the full changelog here: https://github.com/UKPLab/sentence-transformers/releases/tag/v2.3.0 + +I'll be working on much more changes in the near future, so expect more exciting updates. If you encounter any issues, or have any questions or feature requests, don't hesitate to open an issue on the repository: https://github.com/UKPLab/sentence-transformers/issues",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'clem', 'victor', 'chkla', 'santiviquez', 'lbourdois', 'djsull', 'taufiqdp', 'shm141', 'nickprock', 'hysts'], 'count': 11}]",2024-01-29 13:01:06,2024-01-30 20:32:55.094,"[{'_id': '6317233cc92fd6fee317e030', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6317233cc92fd6fee317e030/cJHSvvimr1kqgQfHOjO5n.png', 'fullname': 'Tom Aarsen', 'name': 'tomaarsen', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2266, 'isFollowing': False}]",/posts/tomaarsen/792237167736897,103,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png,883.0,Abubakar Abid,abidlabs,528535313265224,"[{'type': 'text', 'value': '𝗛𝗼𝘄 𝘄𝗲 𝗺𝗮𝗱𝗲 𝗚𝗿𝗮𝗱𝗶𝗼 𝗳𝗮𝘀𝘁𝗲𝗿 𝗯𝘆... 𝘀𝗹𝗼𝘄𝗶𝗻𝗴 𝗶𝘁 𝗱𝗼𝘄𝗻!', 'raw': '𝗛𝗼𝘄 𝘄𝗲 𝗺𝗮𝗱𝗲 𝗚𝗿𝗮𝗱𝗶𝗼 𝗳𝗮𝘀𝘁𝗲𝗿 𝗯𝘆... 𝘀𝗹𝗼𝘄𝗶𝗻𝗴 𝗶𝘁 𝗱𝗼𝘄𝗻!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'About a month ago, ', 'raw': 'About a month ago, '}, {'type': 'mention', 'user': 'oobabooga', 'raw': '@oobabooga'}, {'type': 'text', 'value': ' (who built the popular text generation webui) reported an interesting issue to the Gradio team. After upgrading to Gradio 4, ', 'raw': ' (who built the popular text generation webui) reported an interesting issue to the Gradio team. After upgrading to Gradio 4, '}, {'type': 'mention', 'user': 'oobabooga', 'raw': '@oobabooga'}, {'type': 'text', 'value': ' noticed that chatbots that streamed very quickly had a lag before their text would show up in the Gradio app. 
', 'raw': ' noticed that chatbots that streamed very quickly had a lag before their text would show up in the Gradio app. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""After some investigation, we determined that the Gradio frontend would receive the updates from the backend immediately, but the browser would lag before rendering the changes on the screen. The main difference between Gradio 3 and Gradio 4 was that we migrated the communication protocol between the backend and frontend from Websockets (WS) to Server-Side Events (SSE), but we couldn't figure out why this would affect the browser's ability to render the streaming updates it was receiving."", 'raw': ""After some investigation, we determined that the Gradio frontend would receive the updates from the backend immediately, but the browser would lag before rendering the changes on the screen. The main difference between Gradio 3 and Gradio 4 was that we migrated the communication protocol between the backend and frontend from Websockets (WS) to Server-Side Events (SSE), but we couldn't figure out why this would affect the browser's ability to render the streaming updates it was receiving.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After diving deep into browsers events, ', 'raw': 'After diving deep into browsers events, '}, {'type': 'mention', 'user': 'aliabid94', 'raw': '@aliabid94'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'pngwn', 'raw': '@pngwn'}, {'type': 'text', 'value': ' made a realization: most browsers treat WS events (specifically the ', 'raw': ' made a realization: most browsers treat WS events (specifically the '}, {'type': 'inline_code', 'code': 'WebSocket.onmessage', 'raw': '`WebSocket.onmessage`'}, {'type': 'text', 'value': ' function) with a lower priority than SSE events (', 'raw': ' function) with a lower priority than SSE events ('}, {'type': 'inline_code', 'code': 'EventSource.onmessage', 'raw': '`EventSource.onmessage`'}, {'type': 'text', 'value': "" function), which allowed the browser to repaint the window between WS messages. With SSE, the streaming updates would stack up in the browser's event stack and be prioritized over any browser repaint. The browser would eventually clear the stack but it would take some time to go through each update, which produced a lag."", 'raw': "" function), which allowed the browser to repaint the window between WS messages. With SSE, the streaming updates would stack up in the browser's event stack and be prioritized over any browser repaint. The browser would eventually clear the stack but it would take some time to go through each update, which produced a lag.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'We debated different options, but the solution that we implemented was to introduce throttling: we slowed down how frequently we would push updates to the browser event stack to a maximum rate of 20/sec. Although this seemingly “slowed down” Gradio streaming, it actually would allow browsers to process updates in real-time and provide a much better experience to end users of Gradio apps. ', 'raw': 'We debated different options, but the solution that we implemented was to introduce throttling: we slowed down how frequently we would push updates to the browser event stack to a maximum rate of 20/sec. 
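Not Gradio's actual code, but the shape of the fix in plain Python: cap the emit rate and always flush the final update so no state is lost:

```python
import time

def throttled(stream, max_rate=20):
    """Yield items from `stream` at most `max_rate` times per second,
    always emitting the final item so the last state still arrives."""
    min_interval = 1.0 / max_rate
    last_emit = 0.0
    pending = None
    for chunk in stream:
        pending = chunk
        now = time.monotonic()
        if now - last_emit >= min_interval:
            last_emit = now
            yield pending            # forward this update to the browser
            pending = None           # intermediate updates may be skipped
    if pending is not None:
        yield pending                # flush whatever was skipped at the end
```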
Although this seemingly “slowed down” Gradio streaming, it actually would allow browsers to process updates in real-time and provide a much better experience to end users of Gradio apps. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'See the PR here: ', 'raw': 'See the PR here: '}, {'type': 'link', 'href': 'https://github.com/gradio-app/gradio/pull/7084', 'raw': 'https://github.com/gradio-app/gradio/pull/7084'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Kudos to ', 'raw': 'Kudos to '}, {'type': 'mention', 'user': 'aliabid94', 'raw': '@aliabid94'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'pngwn', 'raw': '@pngwn'}, {'type': 'text', 'value': ' for the fix, and to ', 'raw': ' for the fix, and to '}, {'type': 'mention', 'user': 'oobabooga', 'raw': '@oobabooga'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'pseudotensor', 'raw': '@pseudotensor'}, {'type': 'text', 'value': ' for helping us test it out!', 'raw': ' for helping us test it out!'}, {'type': 'new_line', 'raw': '\n'}]","𝗛𝗼𝘄 𝘄𝗲 𝗺𝗮𝗱𝗲 𝗚𝗿𝗮𝗱𝗶𝗼 𝗳𝗮𝘀𝘁𝗲𝗿 𝗯𝘆... 𝘀𝗹𝗼𝘄𝗶𝗻𝗴 𝗶𝘁 𝗱𝗼𝘄𝗻! + +About a month ago, @oobabooga (who built the popular text generation webui) reported an interesting issue to the Gradio team. After upgrading to Gradio 4, @oobabooga noticed that chatbots that streamed very quickly had a lag before their text would show up in the Gradio app. + +After some investigation, we determined that the Gradio frontend would receive the updates from the backend immediately, but the browser would lag before rendering the changes on the screen. The main difference between Gradio 3 and Gradio 4 was that we migrated the communication protocol between the backend and frontend from Websockets (WS) to Server-Side Events (SSE), but we couldn't figure out why this would affect the browser's ability to render the streaming updates it was receiving. + +After diving deep into browsers events, @aliabid94 and @pngwn made a realization: most browsers treat WS events (specifically the `WebSocket.onmessage` function) with a lower priority than SSE events (`EventSource.onmessage` function), which allowed the browser to repaint the window between WS messages. With SSE, the streaming updates would stack up in the browser's event stack and be prioritized over any browser repaint. The browser would eventually clear the stack but it would take some time to go through each update, which produced a lag. + +We debated different options, but the solution that we implemented was to introduce throttling: we slowed down how frequently we would push updates to the browser event stack to a maximum rate of 20/sec. Although this seemingly “slowed down” Gradio streaming, it actually would allow browsers to process updates in real-time and provide a much better experience to end users of Gradio apps. + +See the PR here: https://github.com/gradio-app/gradio/pull/7084 + +Kudos to @aliabid94 and @pngwn for the fix, and to @oobabooga and @pseudotensor for helping us test it out! 
+",[],"[{'_id': '61d7830bb77a8c48d48bc755', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1655236986178-61d7830bb77a8c48d48bc755.png', 'fullname': 'Ali Abid', 'name': 'aliabid94', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 69}, {'_id': '63d0597ff2341424c808b771', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63d0597ff2341424c808b771/63do8YNK-y82YamrX9sfj.jpeg', 'fullname': 'oobabooga', 'name': 'oobabooga', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 112}, {'_id': '61d5bf2f0435582ab69f8f6d', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1641398053462-noauth.jpeg', 'fullname': 'Pete', 'name': 'pngwn', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 147}, {'_id': '6308791ac038bf42d568153f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6308791ac038bf42d568153f/z9TovAddXU3OQR9N_2KFP.jpeg', 'fullname': 'Jonathan McKinney', 'name': 'pseudotensor', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6}]","[{'reaction': '👍', 'users': ['kramp', 'osanseviero', 'pngwn', 'Ifeanyi', 'xpgx1', 'BramVanroy', 'takeraparterer', 'samusenps', 'clem', 'phanes', 'gate369', 'fffiloni', 'sbarman25', 'taufiqdp', 'akhaliq', 'ysharma', 'davanstrien', 'gsarti', 'lunarflu', 'julien-c', 'merve', 'hysts', 'freddyaboulton'], 'count': 23}, {'reaction': '❤️', 'users': ['clem', 'akhaliq', 'ysharma', 'kramp', 'santiviquez', 'gsarti', 'lunarflu', 'julien-c'], 'count': 8}, {'reaction': '🤗', 'users': ['takeraparterer', 'clem', 'akhaliq', 'ysharma', 'gsarti', 'lunarflu', 'julien-c'], 'count': 7}, {'reaction': '🤯', 'users': ['lunarflu', 'julien-c', 'sbrandeis'], 'count': 3}, {'reaction': '🤝', 'users': ['julien-c'], 'count': 1}]",2024-01-22 20:01:07,2024-01-24 15:09:48.086,"[{'_id': '61868ce808aae0b5499a2a95', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61868ce808aae0b5499a2a95/F6BA0anbsoY_Z7M1JrwOe.jpeg', 'fullname': 'Sylvain Filoni', 'name': 'fffiloni', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 6819, 'isFollowing': False}, {'_id': '60d2dc1007da9c17c72708f8', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1624431552569-noauth.jpeg', 'fullname': 'yuvraj sharma', 'name': 'ysharma', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 797, 'isFollowing': False}, {'_id': '6439622f7245b3b7f4526850', 'avatarUrl': '/avatars/53fe807d9ffaf2c23ac8a13756a2486b.svg', 'fullname': 'Nick Durkee', 'name': 'ndurkee', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/abidlabs/528535313265224,144,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg,2489.0,Clem 🤗,clem,970025506569107,"[{'type': 'text', 'value': 'Re-posting ', 'raw': 'Re-posting '}, {'type': 'mention', 'user': 'karpathy', 'raw': '@karpathy'}, {'type': 'text', 'value': ""'s blogpost here because it's down on "", 'raw': ""'s blogpost here because it's down on ""}, {'type': 'link', 'href': 'https://karpathy.github.io/2024/01/21/selfdriving-agi', 'raw': 'https://karpathy.github.io/2024/01/21/selfdriving-agi'}, {'type': 'text', 'value': '. What do you all think?', 'raw': '. 
What do you all think?'}]",Re-posting @karpathy's blogpost here because it's down on https://karpathy.github.io/2024/01/21/selfdriving-agi. What do you all think?,"[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/6n3rfOGCL-J7ETz_0Iphm.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/itmgDQNABi9_x2o0s5qji.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/F8yJ2CY9oqQDwjypku-eH.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/sX1rNJXHbGBWqV0VoNUIO.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e67bdd61009063689407479/O2F0HRuUXnL_LQSDLFdlV.png'}]","[{'_id': '62f83661fe21cc4875221c0f', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1660434061546-62f83661fe21cc4875221c0f.jpeg', 'fullname': 'Andrej K', 'name': 'karpathy', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 672}]","[{'reaction': '👍', 'users': ['elpreneurAbdo', 'RodolpheD', 'Paul-HF', 'SaylorTwift', 'stefan-jo', 'arnaudliotta', 'lunarflu', 'julien-c', 'merve', 'radames', 'pierrci'], 'count': 11}, {'reaction': '🤯', 'users': ['osanseviero', 'chansung', 'samu', 'lunarflu', 'julien-c', 'merve'], 'count': 6}, {'reaction': '❤️', 'users': ['taufiqdp', 'lunarflu', 'julien-c', 'merve'], 'count': 4}]",2024-01-22 19:38:40,2024-01-23 14:30:48.322,"[{'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}, {'_id': '6538119803519fddb4a17e10', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}, {'_id': '65ae8908b64e1c2389aafd94', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65ae8908b64e1c2389aafd94/bPoQffBL8fASGtvgDpf1F.png', 'fullname': 'Luke Shaffer', 'name': 'LukeES', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '5f061d845d08220171a0ae34', 'avatarUrl': '/avatars/a307da2b473607b1dbf668e7e1168b07.svg', 'fullname': 'Srulik Ben David', 'name': 'Srulikbd', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 1, 'isFollowing': False}]",/posts/clem/970025506569107,60,,4 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,355654182325805,"[{'type': 'text', 'value': 'Depth Anything: Unleashing the Power of Large-Scale Unlabeled Data', 'raw': 'Depth Anything: Unleashing the Power of Large-Scale Unlabeled Data'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'paper page: ', 'raw': 'paper page: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2401.10891'}, 'url': 'https://huggingface.co/papers/2401.10891', 'raw': 'https://huggingface.co/papers/2401.10891', 'label': 'Depth 
Anything: Unleashing the Power of Large-Scale Unlabeled Data (2401.10891)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'demo: ', 'raw': 'demo: '}, {'type': 'resource', 'resource': {'type': 'space', 'id': 'LiheYoung/Depth-Anything'}, 'url': 'https://huggingface.co/spaces/LiheYoung/Depth-Anything', 'raw': 'https://huggingface.co/spaces/LiheYoung/Depth-Anything'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Depth Anything is trained on 1.5M labeled images and 62M+ unlabeled images jointly, providing the most capable Monocular Depth Estimation (MDE) foundation models with the following features:', 'raw': 'Depth Anything is trained on 1.5M labeled images and 62M+ unlabeled images jointly, providing the most capable Monocular Depth Estimation (MDE) foundation models with the following features:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'zero-shot relative depth estimation, better than MiDaS v3.1 (BEiTL-512)', 'raw': 'zero-shot relative depth estimation, better than MiDaS v3.1 (BEiTL-512)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'zero-shot metric depth estimation, better than ZoeDepth', 'raw': 'zero-shot metric depth estimation, better than ZoeDepth'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'optimal in-domain fine-tuning and evaluation on NYUv2 and KITTI', 'raw': 'optimal in-domain fine-tuning and evaluation on NYUv2 and KITTI'}, {'type': 'new_line', 'raw': '\n'}]","Depth Anything: Unleashing the Power of Large-Scale Unlabeled Data
+
+paper page: https://huggingface.co/papers/2401.10891
+demo: https://huggingface.co/spaces/LiheYoung/Depth-Anything
+
+Depth Anything is trained on 1.5M labeled images and 62M+ unlabeled images jointly, providing the most capable Monocular Depth Estimation (MDE) foundation models with the following features:
+
+zero-shot relative depth estimation, better than MiDaS v3.1 (BEiTL-512)
+
+zero-shot metric depth estimation, better than ZoeDepth
+
+optimal in-domain fine-tuning and evaluation on NYUv2 and KITTI
+","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/uOe_5hlaiEisCsyJozbLZ.mp4'}]",[],"[{'reaction': '👍', 'users': ['Norod78', 'jgitsolutions', 'clem', 'Chunte', 'alazro', 'victor', 'samusenps', 'XaevrM', 'NeuralNovel', 'Kutches'], 'count': 10}, {'reaction': '❤️', 'users': ['s3nh', 'NeuralNovel', 'clem', 'Triyansha'], 'count': 4}]",2024-01-22 14:42:51,2024-01-22 14:42:51.301,[],/posts/akhaliq/355654182325805,31,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg,233.0,s3nh,s3nh,589001331123431,"[{'type': 'text', 'value': ""GPU Poor POV: Don't be Afraid :D"", 'raw': ""GPU Poor POV: Don't be Afraid :D""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Sometimes we don't want to do something because of low self-esteem,"", 'raw': ""Sometimes we don't want to do something because of low self-esteem,""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I often hear 'it's too hard for me', 'i am not an expert', 'i do not know how to do it', etc. These words are never the truth; we should not be afraid to try to build something, because there is no additive value without failure. "", 'raw': ""I often hear 'it's too hard for me', 'i am not an expert', 'i do not know how to do it', etc. These words are never the truth; we should not be afraid to try to build something, because there is no additive value without failure. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The same thing comes up in LLMs: there are a lot of fancy words flying around, but what is more important is that there are also people who are constantly building so that others can build. Diving into finetuning LLMs is incredibly simple if we use the axolotl library and pretrained models stored on huggingface. ', 'raw': 'The same thing comes up in LLMs: there are a lot of fancy words flying around, but what is more important is that there are also people who are constantly building so that others can build. Diving into finetuning LLMs is incredibly simple if we use the axolotl library and pretrained models stored on huggingface. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'All we need is an idea, our GPU Poor desktop or a colab notebook, and these steps:', 'raw': 'All we need is an idea, our GPU Poor desktop or a colab notebook, and these steps:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': ""git clone https://github.com/OpenAccess-AI-Collective/axolotl\ncd axolotl\n\npip3 install packaging\npip3 install -e '.[flash-attn,deepspeed]'"", 'raw': ""```\ngit clone https://github.com/OpenAccess-AI-Collective/axolotl\ncd axolotl\n\npip3 install packaging\npip3 install -e '.[flash-attn,deepspeed]'\n```""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'After the installation process we can go to the examples and modify the configs to our own needs. ', 'raw': 'After the installation process we can go to the examples and modify the configs to our own needs. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Let's jump into "", 'raw': ""Let's jump into ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'axolotl\\examples\\llama-2\\qlora.yml ', 'raw': '```\naxolotl\\examples\\llama-2\\qlora.yml \n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'and change ', 'raw': 'and change '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'base_model: NousResearch/Llama-2-7b-hf', 'raw': '```\nbase_model: NousResearch/Llama-2-7b-hf\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'to ', 'raw': 'to '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'base_model: TinyLlama/TinyLlama-1.1B-Chat-v1.0', 'raw': '```\nbase_model: TinyLlama/TinyLlama-1.1B-Chat-v1.0\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""choose a dataset from the huge number of datasets available on hf.co/datasets and tweak additional params like batch_size, the number of epochs, how often we want to save our model and many more (which I won't focus on rn). "", 'raw': ""choose a dataset from the huge number of datasets available on hf.co/datasets and tweak additional params like batch_size, the number of epochs, how often we want to save our model and many more (which I won't focus on rn). ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Then, ', 'raw': 'Then, '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'accelerate launch -m axolotl.cli.train examples/llama-2/qlora.yml', 'raw': '```\naccelerate launch -m axolotl.cli.train examples/llama-2/qlora.yml\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This will start the finetuning process with the structure defined strictly by you. After finetuning, the model will be saved at the path provided in the config, and you can check whether it performs better than the base one. You can even put it on the LLM Leaderboard to check if we have a new SOTA :)', 'raw': 'This will start the finetuning process with the structure defined strictly by you. After finetuning, the model will be saved at the path provided in the config, and you can check whether it performs better than the base one. You can even put it on the LLM Leaderboard to check if we have a new SOTA :)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Have fun and have a great day <3', 'raw': 'Have fun and have a great day <3'}]","GPU Poor POV: Don't be Afraid :D
+
+Sometimes we don't want to do something because of low self-esteem,
+I often hear 'it's too hard for me', 'i am not an expert', 'i do not know how to do it', etc. These words are never the truth; we should not be afraid to try to build something, because there is no additive value without failure. 
+
+The same thing comes up in LLMs: there are a lot of fancy words flying around, but what is more important is that there are also people who are constantly building so that others can build. Diving into finetuning LLMs is incredibly simple if we use the axolotl library and pretrained models stored on huggingface. 
+
+All we need is an idea, our GPU Poor desktop or a colab notebook, and these steps:
+```
+git clone https://github.com/OpenAccess-AI-Collective/axolotl
+cd axolotl
+
+pip3 install packaging
+pip3 install -e '.[flash-attn,deepspeed]'
+```
+After the installation process we can go to the examples and modify the configs to our own needs. 
+Let's jump into 
+```
+axolotl\examples\llama-2\qlora.yml 
+```
+and change 
+```
+base_model: NousResearch/Llama-2-7b-hf
+```
+to 
+```
+base_model: TinyLlama/TinyLlama-1.1B-Chat-v1.0
+```
+choose a dataset from the huge number of datasets available on hf.co/datasets and tweak additional params like batch_size, the number of epochs, how often we want to save our model and many more (which I won't focus on rn). 
+Then, 
+```
+accelerate launch -m axolotl.cli.train examples/llama-2/qlora.yml
+```
+This will start the finetuning process with the structure defined strictly by you. After finetuning, the model will be saved at the path provided in the config, and you can check whether it performs better than the base one. You can even put it on the LLM Leaderboard to check if we have a new SOTA :)
+Have fun and have a great day <3
+",[],[],"[{'reaction': '👍', 'users': ['KnutJaegersberg', 'Csplk', 'miesnerjacob', 'victor', 'alazro', 'becstar', 'samusenps', 'ameerazam08', 'NeuralNovel', 'pro7357', 's3nh', 'Solshine', 'merve', 'aardoi', 'joujiboi', 'danielus', 'Ji-Ha'], 'count': 17}, {'reaction': '🤗', 'users': ['xpgx1', 'samusenps', 'ameerazam08', 'NeuralNovel', 'Solshine', 'clem', 's3nh', 'joujiboi'], 'count': 8}, {'reaction': '❤️', 'users': ['NeuralNovel', 'afrideva', 'clem', 'joujiboi'], 'count': 4}]",2024-01-22 12:20:35,2024-01-23 05:52:18.667,"[{'_id': '645cfe4603fc86c46b3e46d1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/645cfe4603fc86c46b3e46d1/-72dah0aoELhfwYwNJ6Ig.jpeg', 'fullname': 'Lee Jackson', 'name': 'NeuralNovel', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 60, 'isFollowing': False}, {'_id': '63eed5ff6c2c7c702bf89782', 'avatarUrl': '/avatars/50070b95473ad4819a1d2d7a2a7e4b7e.svg', 'fullname': 'afrideva', 'name': 'afrideva', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 60, 'isFollowing': False}, {'_id': '654527ce2a13610acc25d921', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/eukuSyES1wKsf0bTKn4Mv.jpeg', 'fullname': 'Caleb DeLeeuw', 'name': 'Solshine', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 14, 'isFollowing': False}, {'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}]",/posts/s3nh/589001331123431,81,,4
+https://cdn-avatars.huggingface.co/v1/production/uploads/5e7749883d77a72421292d07/M4AmBReZk_otxCIG3o0bL.jpeg,226.0,Gabriele Sarti,gsarti,514819721749841,"[{'type': 'text', 'value': ""🔍 Today's pick in Interpretability & Analysis of LMs: Towards Best Practices of Activation Patching in Language Models: Metrics and Methods by "", 'raw': ""🔍 Today's pick in Interpretability & Analysis of LMs: Towards Best Practices of Activation Patching in Language Models: Metrics and Methods by ""}, {'type': 'mention', 'user': 'm0pp11', 'raw': '@m0pp11'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'NeelNanda', 'raw': '@NeelNanda'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This work systematically examines the impact of methodological details in activation patching, a popular technique with causal guarantees to quantify the importance of model components in driving model predictions. 
Authors' recommendations include 1) using in-distribution counterfactual prompts instead of noise/zeroing to mitigate the OOD problem, 2) using logits instead of probabilities as evaluation metrics to enable the discovery of model components with negative influence on predictions, 3) accounting for interaction factors across layers when performing multi-layer patching; and 4) experimenting with corrupting different prompt tokens to verify their agreement in the resulting discovered circuits."", 'raw': ""This work systematically examines the impact of methodological details in activation patching, a popular technique with causal guarantees to quantify the importance of model components in driving model predictions. Authors' recommendations include 1) using in-distribution counterfactual prompts instead of noise/zeroing to mitigate the OOD problem, 2) using logits instead of probabilities as evaluation metrics to enable the discovery of model components with negative influence on predictions, 3) accounting for interaction factors across layers when performing multi-layer patching; and 4) experimenting with corrupting different prompt tokens to verify their agreement in the resulting discovered circuits.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📄 Paper: ', 'raw': '📄 Paper: '}, {'type': 'resource', 'resource': {'type': 'paper', 'id': '2309.16042'}, 'url': 'https://huggingface.co/papers/2309.16042', 'raw': 'https://huggingface.co/papers/2309.16042', 'label': 'Towards Best Practices of Activation Patching in Language Models:\n Metrics and Methods (2309.16042)'}]","🔍 Today's pick in Interpretability & Analysis of LMs: Towards Best Practices of Activation Patching in Language Models: Metrics and Methods by @m0pp11 and @NeelNanda
+
+This work systematically examines the impact of methodological details in activation patching, a popular technique with causal guarantees to quantify the importance of model components in driving model predictions. Authors' recommendations include 1) using in-distribution counterfactual prompts instead of noise/zeroing to mitigate the OOD problem, 2) using logits instead of probabilities as evaluation metrics to enable the discovery of model components with negative influence on predictions, 3) accounting for interaction factors across layers when performing multi-layer patching; and 4) experimenting with corrupting different prompt tokens to verify their agreement in the resulting discovered circuits. 
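A minimal activation-patching sketch along these lines (illustrative; the model, layer index, prompt pair, and patched position are arbitrary choices, not the paper's setup):

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2").eval()

# In-distribution counterfactual pair (recommendation 1), not noise/zeroing.
clean = tok("The Eiffel Tower is located in the city of", return_tensors="pt")
corrupt = tok("The Colosseum is located in the city of", return_tensors="pt")

layer, cache = model.transformer.h[6], {}

def save_hook(mod, inp, out):          # cache the clean residual stream
    cache["h"] = out[0].detach()

def patch_hook(mod, inp, out):         # splice it into the corrupted run
    hidden = out[0].clone()
    hidden[:, -1, :] = cache["h"][:, -1, :]   # patch the final position only
    return (hidden,) + out[1:]

with torch.no_grad():
    handle = layer.register_forward_hook(save_hook)
    model(**clean); handle.remove()
    handle = layer.register_forward_hook(patch_hook)
    logits = model(**corrupt).logits[0, -1]; handle.remove()

# Recommendation 2: read the effect off the raw logits, not probabilities.
paris = tok(" Paris")["input_ids"][0]   # assumed single-token answers
rome = tok(" Rome")["input_ids"][0]
print("logit diff (Paris - Rome):", (logits[paris] - logits[rome]).item())
```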
+
+📄 Paper: https://huggingface.co/papers/2309.16042","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/NS8HIORf_AR_QGtyuhJKx.png'}, {'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e7749883d77a72421292d07/yaySrpXin6AJ83pNoFu8C.png'}]","[{'_id': '64e81afb9a928410952bee09', 'avatarUrl': '/avatars/4542c83cabc680ec9cf4fbf8e3420ddd.svg', 'fullname': 'Fred Zhang', 'name': 'm0pp11', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 4}, {'_id': '62669380c8bc5cf80ca97350', 'avatarUrl': '/avatars/6d5cd2261163308b82341c1ce28984d1.svg', 'fullname': 'Neel Nanda', 'name': 'NeelNanda', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 61}]","[{'reaction': '❤️', 'users': ['clem', 'victor', 'osanseviero', 'lunarflu', 'clefourrier', 'm0pp11'], 'count': 6}]",2024-01-22 09:18:58,2024-01-22 09:18:58.671,[],/posts/gsarti/514819721749841,3,,0
+https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg,3938.0,chansung park,chansung,968224951725556,"[{'type': 'text', 'value': 'Update on the Newsletter of 🤗 Daily Paper', 'raw': 'Update on the Newsletter of 🤗 Daily Paper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Automatic Korean translation is integrated. In the newsletter, ""KO"" links appear, and they will bring you to the translated version of the full paper. This is done with the following workflow.', 'raw': 'Automatic Korean translation is integrated. In the newsletter, ""KO"" links appear, and they will bring you to the translated version of the full paper. This is done with the following workflow.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '1. Grab the list of arXiv IDs from the 🤗 Daily Paper API', 'raw': '1. Grab the list of arXiv IDs from the 🤗 Daily Paper API'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Distribute a number of sub-lists of arXiv IDs to VMs (possibly spot instances since the job ends shortly)', 'raw': '2. Distribute a number of sub-lists of arXiv IDs to VMs (possibly spot instances since the job ends shortly)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Commit & push the translated paper in HTML to the designated GitHub repository', 'raw': '3. Commit & push the translated paper in HTML to the designated GitHub repository'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '4. 
The newsletter will include the links to the HTML of each paper'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Job distribution to a number of VMs is super easily done with [dstack]( ', 'raw': 'Job distribution to a number of VMs is super easily done with [dstack]( '}, {'type': 'link', 'href': 'https://dstack.ai/', 'raw': 'https://dstack.ai/'}, {'type': 'text', 'value': ' ), and the translation sub-workflow is done through 1) downloading the PDF of each paper with the arxiv-dl package, 2) converting PDF => text with the nougat-ocr package, 3) a custom trained model( ', 'raw': ' ), and the translation sub-workflow is done through 1) downloading the PDF of each paper with the arxiv-dl package, 2) converting PDF => text with the nougat-ocr package, 3) a custom trained model( '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'nlp-with-deeplearning/enko-t5-small-v0'}, 'url': 'https://huggingface.co/nlp-with-deeplearning/enko-t5-small-v0', 'raw': 'https://huggingface.co/nlp-with-deeplearning/enko-t5-small-v0'}, {'type': 'text', 'value': ' ) in 🤗 transformers translating the English text into Korean line by line, and 4) reformatting the translation into HTML.', 'raw': ' ) in 🤗 transformers translating the English text into Korean line by line, and 4) reformatting the translation into HTML.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Many people in Korea are not fluent in English but want to learn about new stuff in AI, so they usually use Google Translate or other services. This is why I made this feature for easier and more direct access to SOTA knowledge. ', 'raw': 'Many people in Korea are not fluent in English but want to learn about new stuff in AI, so they usually use Google Translate or other services. This is why I made this feature for easier and more direct access to SOTA knowledge. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Are there other countries with similar needs? If so, it would be wonderful to cooperate to support more languages. Please reach out if you are interested in this.', 'raw': 'Are there other countries with similar needs? If so, it would be wonderful to cooperate to support more languages. Please reach out if you are interested in this.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'PS: I always wanted to show the usefulness of open ML models by building a well-working end-to-end product, and this newsletter shows it by featuring T5ForConditionalGeneration (translation) and SOLAR LLM (summarization). 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'if you want to sub to the newsletter', 'raw': 'if you want to sub to the newsletter'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ': ', 'raw': ': '}, {'type': 'link', 'href': 'https://groups.google.com/g/hf-daily-paper-newsletter', 'raw': 'https://groups.google.com/g/hf-daily-paper-newsletter'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'if you want to look into the source codes', 'raw': 'if you want to look into the source codes'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ': ', 'raw': ': '}, {'type': 'link', 'href': 'https://github.com/deep-diver/hf-daily-paper-newsletter', 'raw': 'https://github.com/deep-diver/hf-daily-paper-newsletter'}]","Update on the Newsletter of 🤗 Daily Paper
+
+Automatic Korean translation is integrated. In the newsletter, ""KO"" links appear, and they will bring you to the translated version of the full paper. This is done with the following workflow.
+
+1. Grab the list of arXiv IDs from the 🤗 Daily Paper API
+2. Distribute a number of sub-lists of arXiv IDs to VMs (possibly spot instances since the job ends shortly)
+3. Commit & push the translated paper in HTML to the designated GitHub repository
+4. The newsletter will include the links to the HTML of each paper
+
+Job distribution to a number of VMs is super easily done with [dstack]( https://dstack.ai/ ), and the translation sub-workflow is done through 1) downloading the PDF of each paper with the arxiv-dl package, 2) converting PDF => text with the nougat-ocr package, 3) a custom trained model( https://huggingface.co/nlp-with-deeplearning/enko-t5-small-v0 ) in 🤗 transformers translating the English text into Korean line by line, and 4) reformatting the translation into HTML.
+
+Many people in Korea are not fluent in English but want to learn about new stuff in AI, so they usually use Google Translate or other services. This is why I made this feature for easier and more direct access to SOTA knowledge. 
+
+Are there other countries with similar needs? If so, it would be wonderful to cooperate to support more languages. Please reach out if you are interested in this.
+
+PS: I always wanted to show the usefulness of open ML models by building a well-working end-to-end product, and this newsletter shows it by featuring T5ForConditionalGeneration (translation) and SOLAR LLM (summarization). 
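Step 1 of the workflow might look like this (the endpoint path and response fields are my assumptions about the 🤗 Daily Paper API, not something stated in the post):

```python
import requests

# Fetch today's featured papers and pull out their arXiv IDs.
papers = requests.get("https://huggingface.co/api/daily_papers").json()
arxiv_ids = [entry["paper"]["id"] for entry in papers]
print(arxiv_ids[:5])
```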
+ +if you want to sub to the newsletter +: https://groups.google.com/g/hf-daily-paper-newsletter + +if you want to look into the source codes +: https://github.com/deep-diver/hf-daily-paper-newsletter","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60d3b57ad7b174177faabd6e/LBPLzrm2a5HeJo8aX4mqy.mp4'}]",[],"[{'reaction': '❤️', 'users': ['s3nh', 'peterschmidt85', 'aidystark', 'afrideva', 'clem', 'victor', 'samusenps', 'maywell', 'lunarflu', 'julien-c', 'shrijayan', 'ysharma', 'dillfrescott', 'JUNGU'], 'count': 14}, {'reaction': '👍', 'users': ['chansung', 'Siva1306', 'lunarflu', 'dillfrescott'], 'count': 4}]",2024-01-22 06:35:37,2024-01-23 00:09:20.259,"[{'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}, {'_id': '6538119803519fddb4a17e10', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}, {'_id': '60d3b57ad7b174177faabd6e', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1659971187637-60d3b57ad7b174177faabd6e.jpeg', 'fullname': 'chansung park', 'name': 'chansung', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3938, 'isFollowing': False}]",/posts/chansung/968224951725556,74,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,569818073481626,"[{'type': 'text', 'value': 'Migrated all my GPU consuming Spaces to ZERO, it was super easy to do so (add three lines of code and voila!) and the start-up time decreased dramatically as well 💜 ', 'raw': 'Migrated all my GPU consuming Spaces to ZERO, it was super easy to do so (add three lines of code and voila!) and the start-up time decreased dramatically as well 💜 '}]","Migrated all my GPU consuming Spaces to ZERO, it was super easy to do so (add three lines of code and voila!) 
and the start-up time decreased dramatically as well 💜 ","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/6141a88b3a0ec78603c9e784/8cKjrMR_0vvuUqER0Qq2Z.png'}]",[],"[{'reaction': '👍', 'users': ['s3nh', 'NeuralNovel', 'SharathChenna', 'samusenps', 'jkopz', 'akhaliq', 'Chunte', 'chansung', 'aidystark', 'gbharti', 'osanseviero', 'abidlabs', 'clem', 'KnutJaegersberg', 'uratmangun', 'awinml', 'julien-c', 'pierrci', 'clefourrier', 'Norod78', 'Sentdex', 'mikr', 'cbensimon', 'tsukumijima', 'mahiatlinux', 'asad', 'Ryukijano', 'taesiri', 'Nymbo', 'leeloolee'], 'count': 30}, {'reaction': '🤗', 'users': ['aidystark', 'clem', 'awinml', 'Sentdex', 'cbensimon'], 'count': 5}, {'reaction': '🤯', 'users': ['clicktodev', 'Sentdex', 'Nick088'], 'count': 3}]",2024-01-21 17:40:12,2024-05-27 12:39:15.889,"[{'_id': '65ae41e84a0432592c82b76d', 'avatarUrl': '/avatars/012407ceab973b64438d27bb2a77eb83.svg', 'fullname': 'uratmangun', 'name': 'uratmangun', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '6032802e1f993496bc14d9e3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6032802e1f993496bc14d9e3/w6hr-DEQot4VVkoyRIBiy.png', 'fullname': 'Omar Sanseviero', 'name': 'osanseviero', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': True, 'isMod': False, 'followerCount': 3221, 'isFollowing': False}, {'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786, 'isFollowing': False}, {'_id': '6069de23a0e75b0dd0135620', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1617550879179-noauth.jpeg', 'fullname': 'Charles Bensimon', 'name': 'cbensimon', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 163, 'isFollowing': False}, {'_id': '5f5a3f689c5d501a5077e825', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1670088855038-5f5a3f689c5d501a5077e825.jpeg', 'fullname': 'Milan Kryl', 'name': 'mikr', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 12, 'isFollowing': False}, {'_id': '6393f04df7e70dd0166c004e', 'avatarUrl': '/avatars/acf4e9e0204a7ff7445aecc4102700cd.svg', 'fullname': 'Phi Nguyen', 'name': 'nxphi47', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 9, 'isFollowing': False}, {'_id': '644e6b4e030210812f4243cd', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/bS9sRo5qrlvDrO5GPm9jk.jpeg', 'fullname': 'MD AL AMIN', 'name': 'alamin655', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '65442284fcb96b8b485fa501', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/65442284fcb96b8b485fa501/ouNU_rDXH2Um4nzyRRA6C.png', 'fullname': 'Charlie Amalet', 'name': 'CharlieAmalet', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '64e23361607e093241e3c271', 'avatarUrl': '/avatars/2a1daf0fee29302c470cffa8e74a0fdb.svg', 'fullname': 'Yisol Choi', 'name': 'yisol', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 
'followerCount': 171, 'isFollowing': False}, {'_id': '65bec6e5c1a44b6ef148bfa3', 'avatarUrl': '/avatars/f23e27a5c89901d361e13a81416926e2.svg', 'fullname': 'Maheswar KK', 'name': 'mahiatlinux', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 27, 'isFollowing': False}, {'_id': '65a58a9ac980ec229658526c', 'avatarUrl': '/avatars/b048e69006cdcb843c34f2b65872104f.svg', 'fullname': 'Ashish', 'name': 'ashishakk', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '661ab1f1fa3b144a381fa454', 'avatarUrl': '/avatars/7e11b350db47f3ae82dc073f24833a02.svg', 'fullname': 'Urro', 'name': 'urroxyz', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}, {'_id': '664621c5fdb15ac0d10b4b91', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/noauth/eukuSyES1wKsf0bTKn4Mv.png', 'fullname': 'Rino Yet', 'name': 'R1n0', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/merve/569818073481626,1217,,17 +https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg,233.0,s3nh,s3nh,467122012534454,"[{'type': 'text', 'value': 'GPU Poor POV: My storytelling choices of the week', 'raw': 'GPU Poor POV: My storytelling choices of the week'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""It's the end of the week, so I decided to summarize my observations on community-based LLMs and mention a few models in a specific area which are very interesting and have the capability to create some insightful stories despite their relatively lightweight form."", 'raw': ""It's the end of the week, so I decided to summarize my observations on community-based LLMs and mention a few models in a specific area which are very interesting and have the capability to create some insightful stories despite their relatively lightweight form.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I personally did not use LLMs in my daily routine for tasks like function calling, parsing or assisting in code writing. What I tried to use them for is storytelling, because it always amazes me how differently these models take to different preferred tasks.', 'raw': 'I personally did not use LLMs in my daily routine for tasks like function calling, parsing or assisting in code writing. What I tried to use them for is storytelling, because it always amazes me how differently these models take to different preferred tasks.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'How these models are able to generalize the stories and, sometimes, how high a level of creativity they carry.', 'raw': 'How these models are able to generalize the stories and, sometimes, how high a level of creativity they carry.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'BlueNipples/DaringLotus-v2-10.7b'}, 'url': 'https://huggingface.co/BlueNipples/DaringLotus-v2-10.7b', 'raw': 'https://huggingface.co/BlueNipples/DaringLotus-v2-10.7b'}, {'type': 'text', 'value': "" its main target is to generate prose. Quoting the author 'It shares it's good prose, and relatively decent coherency, being a little bit more on the side of prose, and a little bit less on the side of coherency.
I like this model for generating great prose if I feel like regening a bit. '"", 'raw': "" its main target is to generate prose. Quoting the author 'It shares it's good prose, and relatively decent coherency, being a little bit more on the side of prose, and a little bit less on the side of coherency. I like this model for generating great prose if I feel like regening a bit. '""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/NeuralNovel/Aeryth-7B-v0.1', 'raw': 'https://huggingface.co/NeuralNovel/Aeryth-7B-v0.1'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'great work by ', 'raw': 'great work by '}, {'type': 'mention', 'user': 'NeuralNovel', 'raw': '@NeuralNovel'}, {'type': 'text', 'value': ' , I really like how flexible this model is, there is no strict focus on a certain role, so definitely worth a try. Would love to hear more about the dataset on which it was trained, afaik it is private rn. Best suited for Science Fiction, History & Romance genres due to the training data used.', 'raw': ' , I really like how flexible this model is, there is no strict focus on a certain role, so definitely worth a try. Would love to hear more about the dataset on which it was trained, afaik it is private rn. Best suited for Science Fiction, History & Romance genres due to the training data used.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'And the last one for today is ', 'raw': 'And the last one for today is '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'FPHam/Sydney_Pirate_Mistral_7b'}, 'url': 'https://huggingface.co/FPHam/Sydney_Pirate_Mistral_7b', 'raw': 'https://huggingface.co/FPHam/Sydney_Pirate_Mistral_7b'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'mention', 'user': 'FPHam', 'raw': '@FPHam'}, {'type': 'text', 'value': ""'s work always amazes me with how the models are able to stick to the provided role. Awesome work as always, I'll for sure use this model to generate some interesting stories."", 'raw': ""'s work always amazes me with how the models are able to stick to the provided role. Awesome work as always, I'll for sure use this model to generate some interesting stories.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I know that the hype train is going fast, but as I observe, people here on huggingface are creating really creative models which are for sure worth a try. Have a great day <3', 'raw': 'I know that the hype train is going fast, but as I observe, people here on huggingface are creating really creative models which are for sure worth a try. Have a great day <3'}]","GPU Poor POV: My storytelling choices of the week + +It's the end of the week, so I decided to summarize my observations on community-based LLMs and mention a few models in a specific area which are very interesting and have the capability to create some insightful stories despite their relatively lightweight form. + +I personally did not use LLMs in my daily routine for tasks like function calling, parsing or assisting in code writing. What I tried to use them for is storytelling, because it always amazes me how differently these models take to different preferred tasks. + +How these models are able to generalize the stories and, sometimes, how high a level of creativity they carry. + +https://huggingface.co/BlueNipples/DaringLotus-v2-10.7b its main target is to generate prose.
Quoting the author 'It shares it's good prose, and relatively decent coherency, being a little bit more on the side of prose, and a little bit less on the side of coherency. I like this model for generating great prose if I feel like regening a bit. ' + +https://huggingface.co/NeuralNovel/Aeryth-7B-v0.1 +great work by @NeuralNovel , I really like how flexible this model is, there is no strict focus on a certain role, so definitely worth a try. Would love to hear more about the dataset on which it was trained, afaik it is private rn. Best suited for Science Fiction, History & Romance genres due to the training data used. + +And the last one for today is https://huggingface.co/FPHam/Sydney_Pirate_Mistral_7b @FPHam's work always amazes me with how the models are able to stick to the provided role. Awesome work as always, I'll for sure use this model to generate some interesting stories. + +I know that the hype train is going fast, but as I observe, people here on huggingface are creating really creative models which are for sure worth a try. Have a great day <3",[],"[{'_id': '632a0b93cf7d40df9b3cf674', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/632a0b93cf7d40df9b3cf674/mxRiCbPXNzTCu4cEIJO5X.png', 'fullname': 'FPHam', 'name': 'FPHam', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 163}, {'_id': '645cfe4603fc86c46b3e46d1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/645cfe4603fc86c46b3e46d1/-72dah0aoELhfwYwNJ6Ig.jpeg', 'fullname': 'Lee Jackson', 'name': 'NeuralNovel', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 60}]","[{'reaction': '👍', 'users': ['KnutJaegersberg', 'osanseviero', 'FlareRebellion', 'NeuralNovel', 'Dlbk', 'abdd199719', 'merve', 'samusenps', 'NovoCode', 'HuggyMonkey', 'dvilasuero', 'abidlabs', 'clem', 's3nh', 'pro7357'], 'count': 15}, {'reaction': '❤️', 'users': ['NeuralNovel', 'samusenps', 'NovoCode', 'afrideva', 'clem'], 'count': 5}]",2024-01-21 12:16:22,2024-01-25 22:05:00.474,"[{'_id': '63732ebbbd81fae2b3aaf3fb', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1669551186189-63732ebbbd81fae2b3aaf3fb.jpeg', 'fullname': 'Knut Jägersberg', 'name': 'KnutJaegersberg', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 296, 'isFollowing': False}, {'_id': '645cfe4603fc86c46b3e46d1', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/645cfe4603fc86c46b3e46d1/-72dah0aoELhfwYwNJ6Ig.jpeg', 'fullname': 'Lee Jackson', 'name': 'NeuralNovel', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 60, 'isFollowing': False}, {'_id': '6538119803519fddb4a17e10', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}, {'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}, {'_id': '608b8bb39d7c9519b4adae19', 'avatarUrl':
'https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png', 'fullname': 'Abubakar Abid', 'name': 'abidlabs', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 883, 'isFollowing': False}, {'_id': '632a0b93cf7d40df9b3cf674', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/632a0b93cf7d40df9b3cf674/mxRiCbPXNzTCu4cEIJO5X.png', 'fullname': 'FPHam', 'name': 'FPHam', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 163, 'isFollowing': False}]",/posts/s3nh/467122012534454,234,,7 +https://cdn-avatars.huggingface.co/v1/production/uploads/1671292294864-5e00e3bdcbfd036a99df0da3.jpeg,114.0,Doron Adler,Norod78,898169785250170,"[{'type': 'text', 'value': ""I've prepared a Google Colab notebook which allows you to play with interpolating between different people using IP-Adapter SDXL Face-ID Plus. "", 'raw': ""I've prepared a Google Colab notebook which allows you to play with interpolating between different people using IP-Adapter SDXL Face-ID Plus. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': '\n#Prepare a list t of num_of_results values between 0 and 1\nt_space = torch.linspace(0, 1, num_of_results)\nfor t in tqdm(t_space):\n mix_factor = t.item()\n # interpolate between the two face images \n image = (image1 * (1 - mix_factor) + image2 * mix_factor).astype(np.uint8)\n # interpolate between the two face embedding \n faceid_embeds = torch.lerp(faceid_embeds1, faceid_embeds2, t)\n #generate interpolated result\n images = ip_model.generate(prompt=prompt, negative_prompt=negative_prompt, face_image=image, faceid_embeds=faceid_embeds, shortcut=v2, num_samples=2, scale=scale, s_scale=s_scale, guidance_scale=guidance_scale, width=width, height=height, num_inference_steps=steps, seed=seed)\n ', 'raw': '```\n\n#Prepare a list t of num_of_results values between 0 and 1\nt_space = torch.linspace(0, 1, num_of_results)\nfor t in tqdm(t_space):\n mix_factor = t.item()\n # interpolate between the two face images \n image = (image1 * (1 - mix_factor) + image2 * mix_factor).astype(np.uint8)\n # interpolate between the two face embedding \n faceid_embeds = torch.lerp(faceid_embeds1, faceid_embeds2, t)\n #generate interpolated result\n images = ip_model.generate(prompt=prompt, negative_prompt=negative_prompt, face_image=image, faceid_embeds=faceid_embeds, shortcut=v2, num_samples=2, scale=scale, s_scale=s_scale, guidance_scale=guidance_scale, width=width, height=height, num_inference_steps=steps, seed=seed)\n \n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to notebook:', 'raw': 'Link to notebook:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Norod78/face_id_v2_test_code'}, 'url': 'https://colab.research.google.com/#fileId=https%3A//huggingface.co/datasets/Norod78/face_id_v2_test_code/blob/main/norod78_faceid_sdxl_plus_v2_test.ipynb', 'raw': 'https://colab.research.google.com/#fileId=https%3A//huggingface.co/datasets/Norod78/face_id_v2_test_code/blob/main/norod78_faceid_sdxl_plus_v2_test.ipynb'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to Face-ID Repo:', 'raw': 'Link to Face-ID Repo:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'h94/IP-Adapter-FaceID'}, 'url': 
'https://huggingface.co/h94/IP-Adapter-FaceID', 'raw': 'https://huggingface.co/h94/IP-Adapter-FaceID'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Link to all sorts of generated examples (Use the file tab):', 'raw': 'Link to all sorts of generated examples (Use the file tab):'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'Norod78/face_id_v2_test_code'}, 'url': 'https://huggingface.co/datasets/Norod78/face_id_v2_test_code/tree/main/sdxl_plus_v2_outputs', 'raw': 'https://huggingface.co/datasets/Norod78/face_id_v2_test_code/tree/main/sdxl_plus_v2_outputs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","I've prepared a Google Colab notebook which allows you to play with interpolating between different people using IP-Adapter SDXL Face-ID Plus. + +``` + +#Prepare a list t of num_of_results values between 0 and 1 +t_space = torch.linspace(0, 1, num_of_results) +for t in tqdm(t_space): + mix_factor = t.item() + # interpolate between the two face images + image = (image1 * (1 - mix_factor) + image2 * mix_factor).astype(np.uint8) + # interpolate between the two face embedding + faceid_embeds = torch.lerp(faceid_embeds1, faceid_embeds2, t) + #generate interpolated result + images = ip_model.generate(prompt=prompt, negative_prompt=negative_prompt, face_image=image, faceid_embeds=faceid_embeds, shortcut=v2, num_samples=2, scale=scale, s_scale=s_scale, guidance_scale=guidance_scale, width=width, height=height, num_inference_steps=steps, seed=seed) + +``` + +Link to notebook: +https://colab.research.google.com/#fileId=https%3A//huggingface.co/datasets/Norod78/face_id_v2_test_code/blob/main/norod78_faceid_sdxl_plus_v2_test.ipynb + +Link to Face-ID Repo: +https://huggingface.co/h94/IP-Adapter-FaceID + +Link to all sorts of generated examples (Use the file tab): +https://huggingface.co/datasets/Norod78/face_id_v2_test_code/tree/main/sdxl_plus_v2_outputs + +","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e00e3bdcbfd036a99df0da3/6XVam08oGvebcHqDEkx4P.qt'}, {'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5e00e3bdcbfd036a99df0da3/_V0wHMxqlqBY67NnQSqn5.mp4'}]",[],"[{'reaction': '🤯', 'users': ['osanseviero', 'linoyts', 'clem', 'merve', 'HuggyMonkey', 'tonyassi'], 'count': 6}, {'reaction': '❤️', 'users': ['s3nh', 'clem', 'kevinstonge', 'samusenps', 'tonyassi', 'fahim1233'], 'count': 6}, {'reaction': '👍', 'users': ['abdd199719'], 'count': 1}]",2024-01-21 09:24:07,2024-03-19 08:54:34.138,"[{'_id': '61caeda441f9432649f03ab6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/61caeda441f9432649f03ab6/IazJBCi7Cr34DgZXZeI4k.jpeg', 'fullname': 's3nh', 'name': 's3nh', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 233, 'isFollowing': False}, {'_id': '5e00e3bdcbfd036a99df0da3', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1671292294864-5e00e3bdcbfd036a99df0da3.jpeg', 'fullname': 'Doron Adler', 'name': 'Norod78', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 114, 'isFollowing': False}, {'_id': '638f308fc4444c6ca870b60a', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/638f308fc4444c6ca870b60a/Q11NK-8-JbiilJ-vk2LAR.png', 'fullname': 'Linoy Tsaban', 
'name': 'linoyts', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 307, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}, {'_id': '65f7cefede5e636ca2df412e', 'avatarUrl': '/avatars/3ae664485d8df12c9b0e7b91e8d0457b.svg', 'fullname': 'maxells', 'name': 'maksim717', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'isFollowing': False}]",/posts/Norod78/898169785250170,2231,,9 +https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png,883.0,Abubakar Abid,abidlabs,986151192658340,"[{'type': 'text', 'value': ""There's a lot of interest in machine learning models that generate 3D objects, so Gradio now supports previewing STL files natively in the "", 'raw': ""There's a lot of interest in machine learning models that generate 3D objects, so Gradio now supports previewing STL files natively in the ""}, {'type': 'inline_code', 'code': 'Model3D', 'raw': '`Model3D`'}, {'type': 'text', 'value': ' component. Huge thanks to ', 'raw': ' component. Huge thanks to '}, {'type': 'inline_code', 'code': 'Monius', 'raw': '`Monius`'}, {'type': 'text', 'value': ' for the contribution 🔥🔥', 'raw': ' for the contribution 🔥🔥'}, {'type': 'new_line', 'raw': '\n'}]","There's a lot of interest in machine learning models that generate 3D objects, so Gradio now supports previewing STL files natively in the `Model3D` component. Huge thanks to `Monius` for the contribution 🔥🔥 +","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/608b8bb39d7c9519b4adae19/td-cW9V5W_e4oMke2ElqX.png'}]",[],"[{'reaction': '❤️', 'users': ['samusenps', 'Maykeye', 'osanseviero', 'AIIAR', 'Dlbk', 'clem', 'rreed', 'linoyts', 'johko', 'Chunte', 'HuggyMonkey', 'lunarflu', 'ciCic', 'AgastyaPatel', 'marianbasti', 'goendalf666', 'sugatoray', 'conceptron', 'neha4bus'], 'count': 19}, {'reaction': '🤗', 'users': ['samusenps', 'AIIAR', 'clem', 'abidlabs', 'taufiqdp', 'NeuralNovel', 'Chunte', 'lunarflu'], 'count': 8}]",2024-01-21 01:05:22,2024-01-21 12:00:04.408,"[{'_id': '65acc3f268139e3c422bb436', 'avatarUrl': '/avatars/73b36acc7734d31784b9311cd5e143f2.svg', 'fullname': 'sayed ismail', 'name': 'sayedismail', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2, 'isFollowing': False}, {'_id': '5e67bdd61009063689407479', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1583857146757-5e67bdd61009063689407479.jpeg', 'fullname': 'Clem 🤗', 'name': 'clem', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2489, 'isFollowing': False}]",/posts/abidlabs/986151192658340,47,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg,7786.0,merve,merve,533880363228237,"[{'type': 'text', 'value': 'Last month was great for faster/smaller segmentation models, and I wanted to dedicate my first post to compile the recently released SAM variants! 🤗', 'raw': 'Last month was great for faster/smaller segmentation models, and I wanted to dedicate my first post to compile the recently released SAM variants! 
🤗'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📚 All models and their demos can be found in this collection 👉🏼 ', 'raw': '📚 All models and their demos can be found in this collection 👉🏼 '}, {'type': 'resource', 'resource': {'type': 'collection', 'id': 'merve/segment-anything-model-6585835fc76915aa14e2bcbd'}, 'url': 'https://huggingface.co/collections/merve/segment-anything-model-6585835fc76915aa14e2bcbd', 'raw': 'https://huggingface.co/collections/merve/segment-anything-model-6585835fc76915aa14e2bcbd'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The ideas behind them are mostly about making the heavy image encoder lighter either through distillation or changing the pre-training. 💡', 'raw': 'The ideas behind them are mostly about making the heavy image encoder lighter either through distillation or changing the pre-training. 💡'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡️MobileSAM: It decouples the heavy image encoder of SAM and distills it into a TinyViT to make SAM smaller. The architecture is the same except for the encoder.', 'raw': '⚡️MobileSAM: It decouples the heavy image encoder of SAM and distills it into a TinyViT to make SAM smaller. The architecture is the same except for the encoder.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡️TinySAM: It distills the whole model with online hard prompt sampling. The authors also quantized it and released Q-TinySAM. ', 'raw': '⚡️TinySAM: It distills the whole model with online hard prompt sampling. The authors also quantized it and released Q-TinySAM. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '⚡️ EfficientSAM: This model combines masked image pre-training for training lightweight image encoders (like ViTMAE, learns to reconstruct the images) and mask decoder.', 'raw': '⚡️ EfficientSAM: This model combines masked image pre-training for training lightweight image encoders (like ViTMAE, learns to reconstruct the images) and mask decoder.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""⚡️ FastSAM: It's a CNN-based model where the problem is modeled as segments generation. The inference takes place as everything is segmented at once and then you can prompt with boxes or points or text (and this is how it is similar to SAM). So the architecture is nowhere similar to the original SAM itself. "", 'raw': ""⚡️ FastSAM: It's a CNN-based model where the problem is modeled as segments generation. The inference takes place as everything is segmented at once and then you can prompt with boxes or points or text (and this is how it is similar to SAM). So the architecture is nowhere similar to the original SAM itself. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""✨ [NEW]\xa0SlimSAM: It's a pruned-distilled version of pre-trained SAM. The architecture is the same, so "", 'raw': ""✨ [NEW]\xa0SlimSAM: It's a pruned-distilled version of pre-trained SAM. The architecture is the same, so ""}, {'type': 'mention', 'user': 'nielsr', 'raw': '@nielsr'}, {'type': 'text', 'value': ' recently converted the weights and you can use it with the same API you use with SAM models. You can find the available checkpoints in the collection.', 'raw': ' recently converted the weights and you can use it with the same API you use with SAM models. You can find the available checkpoints in the collection.'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I hope you liked it! ', 'raw': 'I hope you liked it! 
'}]","Last month was great for faster/smaller segmentation models, and I wanted to dedicate my first post to compile the recently released SAM variants! 🤗 +📚 All models and their demos can be found in this collection 👉🏼 https://huggingface.co/collections/merve/segment-anything-model-6585835fc76915aa14e2bcbd +The ideas behind them are mostly about making the heavy image encoder lighter either through distillation or changing the pre-training. 💡 +⚡️MobileSAM: It decouples the heavy image encoder of SAM and distills it into a TinyViT to make SAM smaller. The architecture is the same except for the encoder. +⚡️TinySAM: It distills the whole model with online hard prompt sampling. The authors also quantized it and released Q-TinySAM. +⚡️ EfficientSAM: This model combines masked image pre-training for training lightweight image encoders (like ViTMAE, learns to reconstruct the images) and mask decoder. +⚡️ FastSAM: It's a CNN-based model where the problem is modeled as segments generation. The inference takes place as everything is segmented at once and then you can prompt with boxes or points or text (and this is how it is similar to SAM). So the architecture is nowhere similar to the original SAM itself. +✨ [NEW] SlimSAM: It's a pruned-distilled version of pre-trained SAM. The architecture is the same, so @nielsr recently converted the weights and you can use it with the same API you use with SAM models. You can find the available checkpoints in the collection. +I hope you liked it! ",[],"[{'_id': '5f1158120c833276f61f1a84', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1608042047613-5f1158120c833276f61f1a84.jpeg', 'fullname': 'Niels Rogge', 'name': 'nielsr', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 904}]","[{'reaction': '❤️', 'users': ['osanseviero', 'lopezjhonf', 'NemesisAlm', 'amyeroberts', 'julien-c', 'mattmdjaga', 'KnutJaegersberg', 'bisnotforbella', 'AdinaY', 'samusenps', 'abidlabs'], 'count': 11}, {'reaction': '👍', 'users': ['Norod78', 'julien-c', 'KnutJaegersberg', 'mlabonne', 'AdinaY', 'sbrandeis'], 'count': 6}]",2024-01-10 19:58:41,2024-01-10 19:59:42.650,[],/posts/merve/533880363228237,47,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg,334.0,Daniel Vila,dvilasuero,462914041098598,"[{'type': 'text', 'value': '🔥 Less is more for DPO, high quality matters!', 'raw': '🔥 Less is more for DPO, high quality matters!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '📢 Dropping our first open dataset and LLM of the year:', 'raw': '📢 Dropping our first open dataset and LLM of the year:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '💾Meet distilabel Orca Pairs DPO, an improved version of the now famous dataset from Intel:', 'raw': '💾Meet distilabel Orca Pairs DPO, an improved version of the now famous dataset from Intel:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'argilla/distilabel-intel-orca-dpo-pairs'}, 'url': 'https://huggingface.co/datasets/argilla/distilabel-intel-orca-dpo-pairs', 'raw': 'https://huggingface.co/datasets/argilla/distilabel-intel-orca-dpo-pairs'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ' ', 'raw': ' '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type':
'text', 'value': '🏛️ And a new OpenHermes fine-tune outperforming baselines with 54% less DPO pairs:', 'raw': '🏛️ And a new OpenHermes fine-tune outperforming baselines with 54% less DPO pairs:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/argilla/distilabeled-Hermes-2.5-Mistral-7B', 'raw': 'https://huggingface.co/argilla/distilabeled-Hermes-2.5-Mistral-7B'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'You can use this new dataset for your DPO tuning, just like this:', 'raw': 'You can use this new dataset for your DPO tuning, just like this:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'code_fence', 'code': 'from datasets import load_dataset\n\n# Instead of this:\n# dataset = load_dataset(""Intel/orca_dpo_pairs"", split=""train"")\n\n# use this:\ndataset = load_dataset(""argilla/distilabel-intel-orca-dpo-pairs"", split=""train"")\n\ndataset = dataset.filter(\n lambda r: \n r[""status""] != ""tie"" and \n r[""chosen_score""] >= 8 and \n not r[""in_gsm8k_train""]\n)', 'raw': '```\nfrom datasets import load_dataset\n\n# Instead of this:\n# dataset = load_dataset(""Intel/orca_dpo_pairs"", split=""train"")\n\n# use this:\ndataset = load_dataset(""argilla/distilabel-intel-orca-dpo-pairs"", split=""train"")\n\ndataset = dataset.filter(\n lambda r: \n r[""status""] != ""tie"" and \n r[""chosen_score""] >= 8 and \n not r[""in_gsm8k_train""]\n)\n```'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This will reduce the size of the original by 54% while giving you better quality preferences!', 'raw': 'This will reduce the size of the original by 54% while giving you better quality preferences!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'What should we build next?', 'raw': 'What should we build next?'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}]","🔥 Less is more for DPO, high quality matters! + +📢 Dropping our first open dataset and LLM of the year: + +💾Meet distilabel Orca Pairs DPO, an improved version of the now famous dataset from Intel: + +https://huggingface.co/datasets/argilla/distilabel-intel-orca-dpo-pairs + + +🏛️ And a new OpenHermes fine-tune outperforming baselines with 54% less DPO pairs: + +https://huggingface.co/argilla/distilabeled-Hermes-2.5-Mistral-7B + +You can use this new dataset for your DPO tuning, just like this: + + +``` +from datasets import load_dataset + +# Instead of this: +# dataset = load_dataset(""Intel/orca_dpo_pairs"", split=""train"") + +# use this: +dataset = load_dataset(""argilla/distilabel-intel-orca-dpo-pairs"", split=""train"") + +dataset = dataset.filter( + lambda r: + r[""status""] != ""tie"" and + r[""chosen_score""] >= 8 and + not r[""in_gsm8k_train""] +) +``` +This will reduce the size of the original by 54% while giving you better quality preferences! + +What should we build next? 
+ + + +",[],[],"[{'reaction': '❤️', 'users': ['davanstrien', 'osanseviero', 'merve', 'radames', 'tomaarsen', 'mlabonne', 'alielfilali01', 'clem', 'linoyts', 'KnutJaegersberg', 'Forbu14', 'lysandre', 'd0rj', 'mammour', 'VictorSanh', 'gblazex', 'sethuiyer', 'eliebak', 'sbrandeis'], 'count': 19}, {'reaction': '🤯', 'users': ['davanstrien', 'osanseviero', 'merve', 'radames', 'tomaarsen', 'BramVanroy', 'clem', 'KnutJaegersberg', 'lixin67'], 'count': 9}, {'reaction': '👍', 'users': ['hushell'], 'count': 1}]",2024-01-10 18:43:17,2024-01-21 19:26:53.267,"[{'_id': '63e77e7ae02ee67e8e50a541', 'avatarUrl': '/avatars/41a31c90d737660e8293cb8be98c4d18.svg', 'fullname': 'Blaze', 'name': 'gblazex', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 13, 'isFollowing': False}, {'_id': '60420dccc15e823a685f2b03', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg', 'fullname': 'Daniel Vila', 'name': 'dvilasuero', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 334, 'isFollowing': False}]",/posts/dvilasuero/462914041098598,59,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg,2738.0,Julien Chaumond,julien-c,428151014489564,"[{'type': 'text', 'value': 'Finally found my go-to hat for 2024 😎', 'raw': 'Finally found my go-to hat for 2024 😎'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Thanks to ', 'raw': 'Thanks to '}, {'type': 'resource', 'resource': {'type': 'userOrOrg', 'id': 'fal-ai'}, 'url': 'https://huggingface.co/fal-ai', 'raw': 'https://huggingface.co/fal-ai'}]","Finally found my go-to hat for 2024 😎 + +Thanks to https://huggingface.co/fal-ai","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/5dd96eb166059660ed1ee413/-DNSnqBnqNk-ibyGrs9D_.jpeg'}]",[],"[{'reaction': '❤️', 'users': ['dvilasuero', 'burkaygur', 'radames', 'merve', 'pierrci', 'not-lain', 'clem', 'clefourrier', 'samusenps', 'MarinaraSpaghetti', 'fedyanin', 'dillfrescott', 'roseking', 'sbrandeis'], 'count': 14}, {'reaction': '🤝', 'users': ['lysandre', 'merve', 'clem', 'roseking', 'dillfrescott'], 'count': 5}, {'reaction': '👍', 'users': ['Norod78', 'clem', 'roseking', 'dillfrescott'], 'count': 4}]",2024-01-10 18:33:22,2024-02-01 02:50:42.313,"[{'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}, {'_id': '60fb1e6abfa81e553cc5c722', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60fb1e6abfa81e553cc5c722/w1Okfm9m48VaEEr_pAdaY.png', 'fullname': 'Burkay Gur', 'name': 'burkaygur', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 43, 'isFollowing': False}, {'_id': '60d0a6d6aa029cd08c6437f6', 'avatarUrl': '/avatars/273805bcfd1cc8a88395358c78e9695b.svg', 'fullname': 'Pierre-Antoine Passet', 'name': 'pierreant-p', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7, 'isFollowing': False}, {'_id': '6141a88b3a0ec78603c9e784', 'avatarUrl': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/6141a88b3a0ec78603c9e784/DJsxSmWV39M33JFheLobC.jpeg', 'fullname': 'merve', 'name': 'merve', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 7786, 'isFollowing': False}, {'_id': '646b5b503e2a7b065946ffb9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/646b5b503e2a7b065946ffb9/nccWfcReOfbAac6KO_u5r.jpeg', 'fullname': 'Kirill', 'name': 'fedyanin', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 11, 'isFollowing': False}, {'_id': '6215ce9abfcb3893344dd0a2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6215ce9abfcb3893344dd0a2/3afbq35YvIPjbNFi-J7TO.png', 'fullname': 'Cross', 'name': 'dillfrescott', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 53, 'isFollowing': False}]",/posts/julien-c/428151014489564,64,,8 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,476586287487680,"[{'type': 'text', 'value': 'Here is my selection of papers for today (10 Jan)', 'raw': 'Here is my selection of papers for today (10 Jan)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/papers', 'raw': 'https://huggingface.co/papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Jump Cut Smoothing for Talking Heads', 'raw': 'Jump Cut Smoothing for Talking Heads'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'FADI-AEC: Fast Score Based Diffusion Model Guided by Far-end Signal for Acoustic Echo Cancellation', 'raw': 'FADI-AEC: Fast Score Based Diffusion Model Guided by Far-end Signal for Acoustic Echo Cancellation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Masked Audio Generation using a Single Non-Autoregressive Transformer', 'raw': 'Masked Audio Generation using a Single Non-Autoregressive Transformer'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Let's Go Shopping (LGS) -- Web-Scale Image-Text Dataset for Visual Concept Understanding"", 'raw': ""Let's Go Shopping (LGS) -- Web-Scale Image-Text Dataset for Visual Concept Understanding""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Narrowing the Knowledge Evaluation Gap: Open-Domain Question Answering with Multi-Granularity Answers', 'raw': 'Narrowing the Knowledge Evaluation Gap: Open-Domain Question Answering with Multi-Granularity Answers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Lightning Attention-2: A Free Lunch for Handling Unlimited Sequence Lengths in Large Language Models', 'raw': 'Lightning Attention-2: A Free Lunch for Handling Unlimited Sequence Lengths in Large Language Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Chain-of-Table: Evolving Tables in the Reasoning Chain for Table Understanding', 'raw': 'Chain-of-Table: Evolving Tables in the Reasoning Chain for Table Understanding'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MagicVideo-V2: Multi-Stage High-Aesthetic Video Generation', 'raw': 
'MagicVideo-V2: Multi-Stage High-Aesthetic Video Generation'}]","Here is my selection of papers for today (10 Jan) + +https://huggingface.co/papers + +Jump Cut Smoothing for Talking Heads + +FADI-AEC: Fast Score Based Diffusion Model Guided by Far-end Signal for Acoustic Echo Cancellation + +Masked Audio Generation using a Single Non-Autoregressive Transformer + +Let's Go Shopping (LGS) -- Web-Scale Image-Text Dataset for Visual Concept Understanding + +Narrowing the Knowledge Evaluation Gap: Open-Domain Question Answering with Multi-Granularity Answers + +Lightning Attention-2: A Free Lunch for Handling Unlimited Sequence Lengths in Large Language Models + +Chain-of-Table: Evolving Tables in the Reasoning Chain for Table Understanding + +MagicVideo-V2: Multi-Stage High-Aesthetic Video Generation","[{'type': 'video', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/P-zl6ha7fRwP9pmOApfhn.qt'}]",[],"[{'reaction': '❤️', 'users': ['not-lain', 'pierrci', 'julien-c', 'abidlabs', 'davanstrien', 'osanseviero'], 'count': 6}, {'reaction': '🤗', 'users': ['not-lain', 'julien-c', 'abidlabs', 'davanstrien', 'alielfilali01'], 'count': 5}]",2024-01-10 15:11:03,2024-01-10 18:02:54.693,"[{'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}]",/posts/akhaliq/476586287487680,15,,1 +https://cdn-avatars.huggingface.co/v1/production/uploads/64b7e345f92b20f7a38bf47a/9ZZdzuTlSryjnN5_Bx_n-.jpeg,61.0,Farouk,pharaouk,379937660970830,"[{'type': 'text', 'value': 'hello world! ', 'raw': 'hello world! '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""we're starting a new recurring event/club where we read and implement cool ai papers on skunkworks discord. first paper we chose is self-play as there are a lot of opportunities to expand on this framework, here's the link for the event: "", 'raw': ""we're starting a new recurring event/club where we read and implement cool ai papers on skunkworks discord. first paper we chose is self-play as there are a lot of opportunities to expand on this framework, here's the link for the event: ""}, {'type': 'link', 'href': 'https://discord.gg/eAgBr7Fy?event=1194392774905172030', 'raw': 'https://discord.gg/eAgBr7Fy?event=1194392774905172030'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""i'm planning my next post to be a technical deepdive of PCN and the ProspectiveConfiguration algo as i've been spending the last few days getting a good grasp of this promising alternative to BP, stay tuned."", 'raw': ""i'm planning my next post to be a technical deepdive of PCN and the ProspectiveConfiguration algo as i've been spending the last few days getting a good grasp of this promising alternative to BP, stay tuned.""}]","hello world! +we're starting a new recurring event/club where we read and implement cool ai papers on skunkworks discord.
first paper we chose is self-play as there are a lot of opportunities to expand on this framework, here's the link for the event: https://discord.gg/eAgBr7Fy?event=1194392774905172030 + +i'm planning my next post to be a technical deepdive of PCN and the ProspectiveConfiguration algo as i've been spending the last few days getting a good grasp of this promising alternative to BP, stay tuned.",[],[],"[{'reaction': '❤️', 'users': ['osanseviero', 'Tonic', 'victor', 'pierrci', 'pcuenq', 'merve', 'samusenps', 'sbrandeis'], 'count': 8}]",2024-01-09 21:40:58,2024-07-13 06:01:19.763,"[{'_id': '5fcfb7c407408029ba3577e2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1608146735109-5fcfb7c407408029ba3577e2.png', 'fullname': 'Simon Brandeis', 'name': 'sbrandeis', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 170, 'isFollowing': False}, {'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}, {'_id': '608b8bb39d7c9519b4adae19', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1621947938344-noauth.png', 'fullname': 'Abubakar Abid', 'name': 'abidlabs', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 883, 'isFollowing': False}]",/posts/pharaouk/379937660970830,655,,3 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,197374136323416,"[{'type': 'text', 'value': 'Here is my selection of papers for today (9 Jan) ', 'raw': 'Here is my selection of papers for today (9 Jan) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/papers', 'raw': 'https://huggingface.co/papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AGG: Amortized Generative 3D Gaussians for Single Image to 3D', 'raw': 'AGG: Amortized Generative 3D Gaussians for Single Image to 3D'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'MoE-Mamba: Efficient Selective State Space Models with Mixture of Experts', 'raw': 'MoE-Mamba: Efficient Selective State Space Models with Mixture of Experts'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'DiarizationLM: Speaker Diarization Post-Processing with Large Language Models', 'raw': 'DiarizationLM: Speaker Diarization Post-Processing with Large Language Models'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'TeleChat Technical Report', 'raw': 'TeleChat Technical Report'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Soaring from 4K to 400K: Extending LLM's Context with Activation Beacon"", 'raw': ""Soaring from 4K to 400K: Extending LLM's Context with Activation Beacon""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'AST-T5: Structure-Aware Pretraining for Code Generation and Understanding', 'raw': 'AST-T5: Structure-Aware Pretraining for Code Generation and Understanding'}, {'type':
'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Has Your Pretrained Model Improved? A Multi-head Posterior Based Approach', 'raw': 'Has Your Pretrained Model Improved? A Multi-head Posterior Based Approach'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Blending Is All You Need: Cheaper, Better Alternative to Trillion-Parameters LLM', 'raw': 'Blending Is All You Need: Cheaper, Better Alternative to Trillion-Parameters LLM'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'GPT-4V(ision) is a Human-Aligned Evaluator for Text-to-3D Generation', 'raw': 'GPT-4V(ision) is a Human-Aligned Evaluator for Text-to-3D Generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'CRUXEval: A Benchmark for Code Reasoning, Understanding and Execution', 'raw': 'CRUXEval: A Benchmark for Code Reasoning, Understanding and Execution'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Mixtral of Experts', 'raw': 'Mixtral of Experts'}]","Here is my selection of papers for today (9 Jan) + +https://huggingface.co/papers + +AGG: Amortized Generative 3D Gaussians for Single Image to 3D + +MoE-Mamba: Efficient Selective State Space Models with Mixture of Experts + +DiarizationLM: Speaker Diarization Post-Processing with Large Language Models + +TeleChat Technical Report + +Soaring from 4K to 400K: Extending LLM's Context with Activation Beacon + +AST-T5: Structure-Aware Pretraining for Code Generation and Understanding + +Has Your Pretrained Model Improved? A Multi-head Posterior Based Approach + +Blending Is All You Need: Cheaper, Better Alternative to Trillion-Parameters LLM + +GPT-4V(ision) is a Human-Aligned Evaluator for Text-to-3D Generation + +CRUXEval: A Benchmark for Code Reasoning, Understanding and Execution + +Mixtral of Experts","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/yWeF_5kNh6wvsE2r8-X0e.png'}]",[],"[{'reaction': '👍', 'users': ['mahwizzzz', 'alckasoc', 'Tonic', 'victor', 'merve', 'davanstrien', 'IlyasMoutawwakil', 'lixin67', 'aust-t'], 'count': 9}]",2024-01-09 15:31:52,2024-01-09 15:32:01.396,[],/posts/akhaliq/197374136323416,8,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/641dfddf3bae5a77636817c5/2IwNwh9kK98eCHUmOGoWD.png,3369.0,wing lian,winglian,204586934082310,"[{'type': 'text', 'value': ""Full fine-tuning of Microsoft's Phi2 on a single 4090 is now supported in axolotl. Thanks to "", 'raw': ""Full fine-tuning of Microsoft's Phi2 on a single 4090 is now supported in axolotl. Thanks to ""}, {'type': 'mention', 'user': 'abacaj', 'raw': '@abacaj'}, {'type': 'text', 'value': ' and ', 'raw': ' and '}, {'type': 'mention', 'user': 'vikhyatk', 'raw': '@vikhyatk'}, {'type': 'text', 'value': ' for their help with gradient checkpointing and flash attention fixes. ', 'raw': ' for their help with gradient checkpointing and flash attention fixes. 
'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'alpaca finetune: ', 'raw': 'alpaca finetune: '}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'openaccess-ai-collective/phi2-alpaca'}, 'url': 'https://huggingface.co/openaccess-ai-collective/phi2-alpaca', 'raw': 'https://huggingface.co/openaccess-ai-collective/phi2-alpaca'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'wandb: ', 'raw': 'wandb: '}, {'type': 'link', 'href': 'https://wandb.ai/oaaic/phi2/runs/00pc4ugb?workspace=user-wing-lian', 'raw': 'https://wandb.ai/oaaic/phi2/runs/00pc4ugb?workspace=user-wing-lian'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'merged PR: ', 'raw': 'merged PR: '}, {'type': 'link', 'href': 'https://github.com/OpenAccess-AI-Collective/axolotl/pull/1058', 'raw': 'https://github.com/OpenAccess-AI-Collective/axolotl/pull/1058'}, {'type': 'new_line', 'raw': '\n'}]","Full fine-tuning of Microsoft's Phi2 on a single 4090 is now supported in axolotl. Thanks to @abacaj and @vikhyatk for their help with gradient checkpointing and flash attention fixes. + +alpaca finetune: https://huggingface.co/openaccess-ai-collective/phi2-alpaca +wandb: https://wandb.ai/oaaic/phi2/runs/00pc4ugb?workspace=user-wing-lian +merged PR: https://github.com/OpenAccess-AI-Collective/axolotl/pull/1058 +",[],"[{'_id': '62ceeb27e7f6014c0e9d9268', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1677644903536-62ceeb27e7f6014c0e9d9268.jpeg', 'fullname': 'Anton Bacaj', 'name': 'abacaj', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 73}, {'_id': '63117568fa95534e218da163', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/63117568fa95534e218da163/8h9zN8aKRxPLBnXW7sqY9.jpeg', 'fullname': 'Vik Korrapati', 'name': 'vikhyatk', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 552}]","[{'reaction': '👍', 'users': ['osanseviero', 'Citaman', 'victor', 'kramp', 'Tonic', 'marcsun13', 'merve', 'ybelkada', 'Zmu', 'samusenps', 'mrfakename', 'Chunte', 'JaiSurya', 'radames', 'attashe'], 'count': 15}, {'reaction': '❤️', 'users': ['osanseviero', 'Tonic', 'merve', 'ybelkada', 'Chunte', 'radames', 'SicariusSicariiStuff', 'sbrandeis'], 'count': 8}]",2024-01-08 19:21:15,2024-07-13 06:01:19.747,"[{'_id': '5fcfb7c407408029ba3577e2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1608146735109-5fcfb7c407408029ba3577e2.png', 'fullname': 'Simon Brandeis', 'name': 'sbrandeis', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 170, 'isFollowing': False}, {'_id': '641dfddf3bae5a77636817c5', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/641dfddf3bae5a77636817c5/2IwNwh9kK98eCHUmOGoWD.png', 'fullname': 'wing lian', 'name': 'winglian', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 3369, 'isFollowing': False}, {'_id': '62e54f0eae9d3f10acb95cb9', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62e54f0eae9d3f10acb95cb9/VAyk05hqB3OZWXEZW-B0q.png', 'fullname': 'mrfakename', 'name': 'mrfakename', 'type': 'user', 'isPro': True, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 2157, 'isFollowing': False}, {'_id': '6538119803519fddb4a17e10', 'avatarUrl': 
'https://cdn-avatars.huggingface.co/v1/production/uploads/6538119803519fddb4a17e10/ffJMkdx-rM7VvLTCM6ri_.jpeg', 'fullname': 'samusenps', 'name': 'samusenps', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 109, 'isFollowing': False}, {'_id': '6331f59718711776b46afb5e', 'avatarUrl': '/avatars/f18351bc5ce9c106ba74523d9a55567c.svg', 'fullname': 'Lone Striker', 'name': 'LoneStriker', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 987, 'isFollowing': False}, {'_id': '638a3a2efe3185ae7324f1f6', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1670003187019-noauth.png', 'fullname': 'brucethemoose', 'name': 'brucethemoose', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 160, 'isFollowing': False}, {'_id': '6569216f9c96f1a47bf45788', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/6569216f9c96f1a47bf45788/mCLqmAs4dOjKdxNQVAp1w.png', 'fullname': 'Sica Rius', 'name': 'SicariusSicariiStuff', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 388, 'isFollowing': False}]",/posts/winglian/204586934082310,2996,,8 +https://cdn-avatars.huggingface.co/v1/production/uploads/1674929746905-60f1abe7544c2adfd699860c.jpeg,7219.0,AK,akhaliq,941066614615290,"[{'type': 'text', 'value': 'Here is my selection of papers for today (8 Jan)', 'raw': 'Here is my selection of papers for today (8 Jan)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://huggingface.co/papers', 'raw': 'https://huggingface.co/papers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'DocGraphLM: Documental Graph Language Model for Information Extraction', 'raw': 'DocGraphLM: Documental Graph Language Model for Information Extraction'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Denoising Vision Transformers', 'raw': 'Denoising Vision Transformers'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Progressive Knowledge Distillation Of Stable Diffusion XL Using Layer Level Loss', 'raw': 'Progressive Knowledge Distillation Of Stable Diffusion XL Using Layer Level Loss'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Open-Vocabulary SAM: Segment and Recognize Twenty-thousand Classes Interactively', 'raw': 'Open-Vocabulary SAM: Segment and Recognize Twenty-thousand Classes Interactively'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Pheme: Efficient and Conversational Speech Generation', 'raw': 'Pheme: Efficient and Conversational Speech Generation'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'DeepSeek LLM: Scaling Open-Source Language Models with Longtermism', 'raw': 'DeepSeek LLM: Scaling Open-Source Language Models with Longtermism'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Infinite-LLM: Efficient LLM Service for Long Context with DistAttention and Distributed KVCache', 'raw': 'Infinite-LLM: Efficient LLM Service for Long Context with DistAttention and Distributed KVCache'}]","Here is my selection of papers for today (8 Jan) + 
+https://huggingface.co/papers + +DocGraphLM: Documental Graph Language Model for Information Extraction + +Denoising Vision Transformers + +Progressive Knowledge Distillation Of Stable Diffusion XL Using Layer Level Loss + +Open-Vocabulary SAM: Segment and Recognize Twenty-thousand Classes Interactively + +Pheme: Efficient and Conversational Speech Generation + +DeepSeek LLM: Scaling Open-Source Language Models with Longtermism + +Infinite-LLM: Efficient LLM Service for Long Context with DistAttention and Distributed KVCache","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60f1abe7544c2adfd699860c/lCGGPmlQU0_RpspuKBsJj.png'}]",[],"[{'reaction': '❤️', 'users': ['reach-vb', 'victor', 'merve'], 'count': 3}]",2024-01-08 15:22:48,2024-01-08 15:22:48.754,[],/posts/akhaliq/941066614615290,9,,0 +https://cdn-avatars.huggingface.co/v1/production/uploads/1669551186189-63732ebbbd81fae2b3aaf3fb.jpeg,296.0,Knut Jägersberg,KnutJaegersberg,248307299871659,"[{'type': 'text', 'value': 'QuIP# ecosystem is growing :) ', 'raw': 'QuIP# ecosystem is growing :) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I've seen a quip# 2 bit Qwen-72b-Chat model today on the hub that shows there is support for vLLM inference. "", 'raw': ""I've seen a quip# 2 bit Qwen-72b-Chat model today on the hub that shows there is support for vLLM inference. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""This will speed up inference and make high performing 2 bit models more practical. I'm considering quipping MoMo now, as I can only use a brief context window of Qwen-72b on my system otherwise, even with bnb double quantization. "", 'raw': ""This will speed up inference and make high performing 2 bit models more practical. I'm considering quipping MoMo now, as I can only use a brief context window of Qwen-72b on my system otherwise, even with bnb double quantization. ""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'model', 'id': 'keyfan/Qwen-72B-Chat-2bit'}, 'url': 'https://huggingface.co/keyfan/Qwen-72B-Chat-2bit', 'raw': 'https://huggingface.co/keyfan/Qwen-72B-Chat-2bit'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Also notice the easier-to-use QuIP-for-all library :) ', 'raw': 'Also notice the easier-to-use QuIP-for-all library :) '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'link', 'href': 'https://github.com/chu-tianxiang/QuIP-for-all', 'raw': 'https://github.com/chu-tianxiang/QuIP-for-all'}]","QuIP# ecosystem is growing :) + +I've seen a quip# 2 bit Qwen-72b-Chat model today on the hub that shows there is support for vLLM inference. +This will speed up inference and make high performing 2 bit models more practical. I'm considering quipping MoMo now, as I can only use a brief context window of Qwen-72b on my system otherwise, even with bnb double quantization. 
+ +https://huggingface.co/keyfan/Qwen-72B-Chat-2bit + +Also notice the easier-to-use QuIP-for-all library :) + +https://github.com/chu-tianxiang/QuIP-for-all",[],[],"[{'reaction': '🤗', 'users': ['Felladrin', 'osanseviero', 'victor', 'ybelkada', 'reach-vb', 'merve', 'samusenps', 'mexicanamerican'], 'count': 8}, {'reaction': '👍', 'users': ['osanseviero', 'ybelkada', 'reach-vb', 'Tonic', 'kramp', 'merve', 'abidlabs'], 'count': 7}, {'reaction': '❤️', 'users': ['osanseviero', 'ybelkada', 'reach-vb', 'merve', 'samusenps', 'sbrandeis'], 'count': 6}, {'reaction': '🤯', 'users': ['reach-vb', 'merve', 'sbrandeis'], 'count': 3}]",2024-01-07 20:45:46,2024-07-13 06:01:19.758,"[{'_id': '5dd96eb166059660ed1ee413', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/5dd96eb166059660ed1ee413/NQtzmrDdbG0H8qkZvRyGk.jpeg', 'fullname': 'Julien Chaumond', 'name': 'julien-c', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 2738, 'isFollowing': False}, {'_id': '5fcfb7c407408029ba3577e2', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/1608146735109-5fcfb7c407408029ba3577e2.png', 'fullname': 'Simon Brandeis', 'name': 'sbrandeis', 'type': 'user', 'isPro': False, 'isHf': True, 'isHfAdmin': True, 'isMod': False, 'followerCount': 170, 'isFollowing': False}]",/posts/KnutJaegersberg/248307299871659,680,,2 +https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg,334.0,Daniel Vila,dvilasuero,321536384660058,"[{'type': 'text', 'value': '👋 Hi there!', 'raw': '👋 Hi there!'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'This is my very first post. ', 'raw': 'This is my very first post. '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""I'll use it to share some old news: a math preference dataset for DPO!"", 'raw': ""I'll use it to share some old news: a math preference dataset for DPO!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'I created this dataset some time ago while we were developing distilabel (', 'raw': 'I created this dataset some time ago while we were developing distilabel ('}, {'type': 'link', 'href': 'https://github.com/argilla-io/distilabel', 'raw': 'https://github.com/argilla-io/distilabel'}, {'type': 'text', 'value': ').', 'raw': ').'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""Some days ago we found out people are actually using it! So I'll use this post to explain how I built it in case it's useful for the community."", 'raw': ""Some days ago we found out people are actually using it! So I'll use this post to explain how I built it in case it's useful for the community.""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""1. I used distilabel's SelfInstruct-inspired task to generate instructions about different math topics. I curated the instructions with Argilla (on Spaces!)."", 'raw': ""1. I used distilabel's SelfInstruct-inspired task to generate instructions about different math topics. I curated the instructions with Argilla (on Spaces!).""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '2. Then I used a distilabel Pipeline to build a preference dataset using gpt3.5 as generator and gpt4 as labeller.
If I recall correctly I used our JudgeLM implementation (see ', 'raw': '2. Then I used a distilabel Pipeline to build a preference dataset using gpt3.5 as generator and gpt4 as labeller. If I recall correctly I used our JudgeLM implementation (see '}, {'type': 'link', 'href': 'https://distilabel.argilla.io/latest/technical-reference/tasks/#judgelmtask', 'raw': 'https://distilabel.argilla.io/latest/technical-reference/tasks/#judgelmtask'}, {'type': 'text', 'value': ')', 'raw': ')'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '(see the screenshot with the dataset in the Argilla UI)', 'raw': '(see the screenshot with the dataset in the Argilla UI)'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': '3. Then I just binarized into chosen, rejected pairs and voilà:', 'raw': '3. Then I just binarized into chosen, rejected pairs and voilà:'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'resource', 'resource': {'type': 'dataset', 'id': 'argilla/distilabel-math-preference-dpo'}, 'url': 'https://huggingface.co/datasets/argilla/distilabel-math-preference-dpo', 'raw': 'https://huggingface.co/datasets/argilla/distilabel-math-preference-dpo'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'The funny thing is that I used this to do a second DPO run over Notus-7B. I hoped to see an improvement on math/reasoning skills but it actually improved in STEM and Humanities and did worse on Math 🤣 . ', 'raw': 'The funny thing is that I used this to do a second DPO run over Notus-7B. I hoped to see an improvement on math/reasoning skills but it actually improved in STEM and Humanities and did worse on Math 🤣 . '}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': ""In conclusion, this dataset was only a quick experiment. I'm happy to see the community found it useful. Data for DPO and fine-tuning are still a mystery, let's unveil these mysteries in 2024 together!"", 'raw': ""In conclusion, this dataset was only a quick experiment. I'm happy to see the community found it useful. Data for DPO and fine-tuning are still a mystery, let's unveil these mysteries in 2024 together!""}, {'type': 'new_line', 'raw': '\n'}, {'type': 'new_line', 'raw': '\n'}, {'type': 'text', 'value': 'Follow me for the most exciting datasets for LLMs (and maybe some great, small, efficient models). I plan to announce all Argilla open-source work here!', 'raw': 'Follow me for the most exciting datasets for LLMs (and maybe some great, small, efficient models). I plan to announce all Argilla open-source work here!'}]","👋 Hi there! + +This is my very first post. + +I'll use it to share some old news: a math preference dataset for DPO! + +I created this dataset some time ago while we were developing distilabel (https://github.com/argilla-io/distilabel). + +Some days ago we found out people are actually using it! So I'll use this post to explain how I built it in case it's useful for the community. + +1. I used distilabel's SelfInstruct-inspired task to generate instructions about different math topics. I curated the instructions with Argilla (on Spaces!). +2. Then I used a distilabel Pipeline to build a preference dataset using gpt3.5 as generator and gpt4 as labeller. 
If I recall correctly I used our JudgeLM implementation (see https://distilabel.argilla.io/latest/technical-reference/tasks/#judgelmtask) + +(see the screenshot with the dataset in the Argilla UI) + +3. Then I just binarized into chosen, rejected pairs and voilà: + +https://huggingface.co/datasets/argilla/distilabel-math-preference-dpo + +The funny thing is that I used this to do a second DPO run over Notus-7B. I hoped to see an improvement on math/reasoning skills but it actually improved in STEM and Humanities and did worse on Math 🤣 . + +In conclusion, this dataset was only a quick experiment. I'm happy to see the community found it useful. Data for DPO and fine-tuning are still a mystery, let's unveil these mysteries in 2024 together! + +Follow me for the most exciting datasets for LLMs (and maybe some great, small, efficient models). I plan to announce all Argilla open-source work here!","[{'type': 'image', 'url': 'https://cdn-uploads.huggingface.co/production/uploads/60420dccc15e823a685f2b03/MK1bnv_z9xYAs2ptTdZcX.png'}]",[],"[{'reaction': '👍', 'users': ['pacoid', 'sparkycollier', 'osanseviero', 'gabrielmbmb', 'mishig', 'severo', 'sugatoray', 'davanstrien', 'mlabonne', 'qJakc', 'MoritzLaurer', 'KnutJaegersberg', 'radames', 'victor', 'clem', 'julien-c', 'merve', 'tmnam20', 'Erland', 'DarkLord7771', 'abidlabs', 'Masa-Erland'], 'count': 22}, {'reaction': '❤️', 'users': ['pcuenq', 'osanseviero', 'gabrielmbmb', 'mishig', 'plaguss', 'davanstrien', 'pierrci', 'qJakc', 'thomwolf', 'Manel-Hik', 'victor', 'radames', 'mariagrandury', 'clem', 'julien-c', 'merve', 'sbrandeis'], 'count': 17}, {'reaction': '🤗', 'users': ['qJakc', 'Tonic', 'mariagrandury', 'julien-c', 'davanstrien', 'merve'], 'count': 6}]",2024-01-06 18:47:54,2024-01-07 11:45:02.438,"[{'_id': '60420dccc15e823a685f2b03', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/60420dccc15e823a685f2b03/AOApMWt_jvm9e6XQ2vlrJ.jpeg', 'fullname': 'Daniel Vila', 'name': 'dvilasuero', 'type': 'user', 'isPro': True, 'isHf': True, 'isHfAdmin': False, 'isMod': False, 'followerCount': 334, 'isFollowing': False}, {'_id': '62a3bb1cd0d8c2c2169f0b88', 'avatarUrl': 'https://cdn-avatars.huggingface.co/v1/production/uploads/62a3bb1cd0d8c2c2169f0b88/eT2TS0IlQbZtz-F_zHLz9.jpeg', 'fullname': 'Joseph [open/acc] Pollack', 'name': 'Tonic', 'type': 'user', 'isPro': False, 'isHf': False, 'isHfAdmin': False, 'isMod': False, 'followerCount': 415, 'isFollowing': False}]",/posts/dvilasuero/321536384660058,32,,2
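The "binarize into chosen, rejected pairs" step described in the post above reduces each judge-rated record to a single DPO training pair: keep the prompt, take the highest-rated generation as `chosen` and the lowest-rated as `rejected`. Below is a minimal, self-contained sketch of that idea in plain Python, assuming hypothetical `instruction`, `generations`, and `ratings` fields; the author's actual distilabel code may differ.

```python
def binarize(example: dict) -> dict:
    """Reduce one judge-rated record to a DPO-style (chosen, rejected) pair.

    Sketch only: field names are illustrative assumptions, not the
    dataset's guaranteed schema.
    """
    # Sort generations by their judge rating, best first.
    ranked = sorted(
        zip(example["generations"], example["ratings"]),
        key=lambda pair: pair[1],
        reverse=True,
    )
    return {
        "prompt": example["instruction"],
        "chosen": ranked[0][0],    # highest-rated generation
        "rejected": ranked[-1][0], # lowest-rated generation
    }


record = {
    "instruction": "What is the derivative of x^2?",
    "generations": ["2x", "x"],
    "ratings": [9.0, 3.0],
}
print(binarize(record))
# {'prompt': 'What is the derivative of x^2?', 'chosen': '2x', 'rejected': 'x'}
```

Applied over every record, this yields exactly the `prompt`/`chosen`/`rejected` columns that DPO trainers expect.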