[{"data":1,"prerenderedAt":-1},["ShallowReactive",2],{"similar-pulp-platform--pulp-dronet":3,"tool-pulp-platform--pulp-dronet":64},[4,17,27,35,43,56],{"id":5,"name":6,"github_repo":7,"description_zh":8,"stars":9,"difficulty_score":10,"last_commit_at":11,"category_tags":12,"status":16},3808,"stable-diffusion-webui","AUTOMATIC1111\u002Fstable-diffusion-webui","stable-diffusion-webui 是一个基于 Gradio 构建的网页版操作界面，旨在让用户能够轻松地在本地运行和使用强大的 Stable Diffusion 图像生成模型。它解决了原始模型依赖命令行、操作门槛高且功能分散的痛点，将复杂的 AI 绘图流程整合进一个直观易用的图形化平台。\n\n无论是希望快速上手的普通创作者、需要精细控制画面细节的设计师，还是想要深入探索模型潜力的开发者与研究人员，都能从中获益。其核心亮点在于极高的功能丰富度：不仅支持文生图、图生图、局部重绘（Inpainting）和外绘（Outpainting）等基础模式，还独创了注意力机制调整、提示词矩阵、负向提示词以及“高清修复”等高级功能。此外，它内置了 GFPGAN 和 CodeFormer 等人脸修复工具，支持多种神经网络放大算法，并允许用户通过插件系统无限扩展能力。即使是显存有限的设备，stable-diffusion-webui 也提供了相应的优化选项，让高质量的 AI 艺术创作变得触手可及。",162132,3,"2026-04-05T11:01:52",[13,14,15],"开发框架","图像","Agent","ready",{"id":18,"name":19,"github_repo":20,"description_zh":21,"stars":22,"difficulty_score":23,"last_commit_at":24,"category_tags":25,"status":16},1381,"everything-claude-code","affaan-m\u002Feverything-claude-code","everything-claude-code 是一套专为 AI 编程助手（如 Claude Code、Codex、Cursor 等）打造的高性能优化系统。它不仅仅是一组配置文件，而是一个经过长期实战打磨的完整框架，旨在解决 AI 代理在实际开发中面临的效率低下、记忆丢失、安全隐患及缺乏持续学习能力等核心痛点。\n\n通过引入技能模块化、直觉增强、记忆持久化机制以及内置的安全扫描功能，everything-claude-code 能显著提升 AI 在复杂任务中的表现，帮助开发者构建更稳定、更智能的生产级 AI 代理。其独特的“研究优先”开发理念和针对 Token 消耗的优化策略，使得模型响应更快、成本更低，同时有效防御潜在的攻击向量。\n\n这套工具特别适合软件开发者、AI 研究人员以及希望深度定制 AI 工作流的技术团队使用。无论您是在构建大型代码库，还是需要 AI 协助进行安全审计与自动化测试，everything-claude-code 都能提供强大的底层支持。作为一个曾荣获 Anthropic 黑客大奖的开源项目，它融合了多语言支持与丰富的实战钩子（hooks），让 AI 真正成长为懂上",138956,2,"2026-04-05T11:33:21",[13,15,26],"语言模型",{"id":28,"name":29,"github_repo":30,"description_zh":31,"stars":32,"difficulty_score":23,"last_commit_at":33,"category_tags":34,"status":16},2271,"ComfyUI","Comfy-Org\u002FComfyUI","ComfyUI 是一款功能强大且高度模块化的视觉 AI 引擎，专为设计和执行复杂的 Stable Diffusion 图像生成流程而打造。它摒弃了传统的代码编写模式，采用直观的节点式流程图界面，让用户通过连接不同的功能模块即可构建个性化的生成管线。\n\n这一设计巧妙解决了高级 AI 绘图工作流配置复杂、灵活性不足的痛点。用户无需具备编程背景，也能自由组合模型、调整参数并实时预览效果，轻松实现从基础文生图到多步骤高清修复等各类复杂任务。ComfyUI 拥有极佳的兼容性，不仅支持 Windows、macOS 和 Linux 全平台，还广泛适配 NVIDIA、AMD、Intel 及苹果 Silicon 等多种硬件架构，并率先支持 SDXL、Flux、SD3 等前沿模型。\n\n无论是希望深入探索算法潜力的研究人员和开发者，还是追求极致创作自由度的设计师与资深 AI 绘画爱好者，ComfyUI 都能提供强大的支持。其独特的模块化架构允许社区不断扩展新功能，使其成为当前最灵活、生态最丰富的开源扩散模型工具之一，帮助用户将创意高效转化为现实。",107662,"2026-04-03T11:11:01",[13,14,15],{"id":36,"name":37,"github_repo":38,"description_zh":39,"stars":40,"difficulty_score":23,"last_commit_at":41,"category_tags":42,"status":16},3704,"NextChat","ChatGPTNextWeb\u002FNextChat","NextChat 是一款轻量且极速的 AI 助手，旨在为用户提供流畅、跨平台的大模型交互体验。它完美解决了用户在多设备间切换时难以保持对话连续性，以及面对众多 AI 模型不知如何统一管理的痛点。无论是日常办公、学习辅助还是创意激发，NextChat 都能让用户随时随地通过网页、iOS、Android、Windows、MacOS 或 Linux 端无缝接入智能服务。\n\n这款工具非常适合普通用户、学生、职场人士以及需要私有化部署的企业团队使用。对于开发者而言，它也提供了便捷的自托管方案，支持一键部署到 Vercel 或 Zeabur 等平台。\n\nNextChat 的核心亮点在于其广泛的模型兼容性，原生支持 Claude、DeepSeek、GPT-4 及 Gemini Pro 等主流大模型，让用户在一个界面即可自由切换不同 AI 能力。此外，它还率先支持 MCP（Model Context Protocol）协议，增强了上下文处理能力。针对企业用户，NextChat 提供专业版解决方案，具备品牌定制、细粒度权限控制、内部知识库整合及安全审计等功能，满足公司对数据隐私和个性化管理的高标准要求。",87618,"2026-04-05T07:20:52",[13,26],{"id":44,"name":45,"github_repo":46,"description_zh":47,"stars":48,"difficulty_score":23,"last_commit_at":49,"category_tags":50,"status":16},2268,"ML-For-Beginners","microsoft\u002FML-For-Beginners","ML-For-Beginners 是由微软推出的一套系统化机器学习入门课程，旨在帮助零基础用户轻松掌握经典机器学习知识。这套课程将学习路径规划为 12 周，包含 26 节精炼课程和 52 
道配套测验，内容涵盖从基础概念到实际应用的完整流程，有效解决了初学者面对庞大知识体系时无从下手、缺乏结构化指导的痛点。\n\n无论是希望转型的开发者、需要补充算法背景的研究人员，还是对人工智能充满好奇的普通爱好者，都能从中受益。课程不仅提供了清晰的理论讲解，还强调动手实践，让用户在循序渐进中建立扎实的技能基础。其独特的亮点在于强大的多语言支持，通过自动化机制提供了包括简体中文在内的 50 多种语言版本，极大地降低了全球不同背景用户的学习门槛。此外，项目采用开源协作模式，社区活跃且内容持续更新，确保学习者能获取前沿且准确的技术资讯。如果你正寻找一条清晰、友好且专业的机器学习入门之路，ML-For-Beginners 将是理想的起点。",84991,"2026-04-05T10:45:23",[14,51,52,53,15,54,26,13,55],"数据工具","视频","插件","其他","音频",{"id":57,"name":58,"github_repo":59,"description_zh":60,"stars":61,"difficulty_score":10,"last_commit_at":62,"category_tags":63,"status":16},3128,"ragflow","infiniflow\u002Fragflow","RAGFlow 是一款领先的开源检索增强生成（RAG）引擎，旨在为大语言模型构建更精准、可靠的上下文层。它巧妙地将前沿的 RAG 技术与智能体（Agent）能力相结合，不仅支持从各类文档中高效提取知识，还能让模型基于这些知识进行逻辑推理和任务执行。\n\n在大模型应用中，幻觉问题和知识滞后是常见痛点。RAGFlow 通过深度解析复杂文档结构（如表格、图表及混合排版），显著提升了信息检索的准确度，从而有效减少模型“胡编乱造”的现象，确保回答既有据可依又具备时效性。其内置的智能体机制更进一步，使系统不仅能回答问题，还能自主规划步骤解决复杂问题。\n\n这款工具特别适合开发者、企业技术团队以及 AI 研究人员使用。无论是希望快速搭建私有知识库问答系统，还是致力于探索大模型在垂直领域落地的创新者，都能从中受益。RAGFlow 提供了可视化的工作流编排界面和灵活的 API 接口，既降低了非算法背景用户的上手门槛，也满足了专业开发者对系统深度定制的需求。作为基于 Apache 2.0 协议开源的项目，它正成为连接通用大模型与行业专有知识之间的重要桥梁。",77062,"2026-04-04T04:44:48",[15,14,13,26,54],{"id":65,"github_repo":66,"name":67,"description_en":68,"description_zh":69,"ai_summary_zh":69,"readme_en":70,"readme_zh":71,"quickstart_zh":72,"use_case_zh":73,"hero_image_url":74,"owner_login":75,"owner_name":75,"owner_avatar_url":76,"owner_bio":77,"owner_company":77,"owner_location":77,"owner_email":77,"owner_twitter":77,"owner_website":77,"owner_url":78,"languages":79,"stars":96,"forks":97,"last_commit_at":98,"license":99,"difficulty_score":100,"env_os":101,"env_gpu":102,"env_ram":103,"env_deps":104,"category_tags":112,"github_topics":113,"view_count":23,"oss_zip_url":77,"oss_zip_packed_at":77,"status":16,"created_at":123,"updated_at":124,"faqs":125,"releases":126},2089,"pulp-platform\u002Fpulp-dronet","pulp-dronet","A deep learning-powered visual navigation engine to enables autonomous navigation of pocket-size quadrotor - running on PULP","pulp-dronet 是一款专为口袋大小四旋翼无人机打造的深度学习视觉导航引擎。它致力于解决微型无人机在算力受限、功耗严苛的环境下，如何实现完全自主飞行与避障的难题。传统方案往往依赖沉重的机载电脑或外部定位系统，而 pulp-dronet 通过将高度压缩的深度神经网络直接运行在超低功耗的 PULP 芯片上，让纳米级无人机也能具备“看”懂环境并独立规划路径的能力。\n\n这款工具特别适合嵌入式系统开发者、机器人研究人员以及从事微型无人机设计的工程师使用。其核心亮点在于极致的轻量化与高效能：通过先进的模型蒸馏技术，它在保持高精度导航的同时，将功耗控制在毫瓦级别，实现了真正的端侧实时推理。无论是穿越复杂障碍物还是执行动态任务，pulp-dronet 都证明了即便在资源极度受限的硬件上，人工智能也能驱动小型飞行器完成高难度的自主导航任务，是探索边缘计算与微型机器人结合的理想开源平台。","\u003Cdiv align=\"center\"; margin: 0px; padding: 0px;\">\n  \u003Cimg alt=\"GitHub Repo stars\" src=\"https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fstars\u002Fpulp-platform\u002Fpulp-dronet?style=flat-square\">\n  \u003Cimg alt=\"GitHub forks\" src=\"https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fforks\u002Fpulp-platform\u002Fpulp-dronet?style=flat-square\">\n  \u003Cimg alt=\"GitHub watchers\" src=\"https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fwatchers\u002Fpulp-platform\u002Fpulp-dronet?style=flat-square\">\n  \u003Cimg alt=\"GitHub License\" src=\"https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fcontributors\u002Fpulp-platform\u002Fpulp-dronet?style=flat-square\">\n  \u003Cimg alt=\"GitHub License\" src=\"https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Flicense\u002Fpulp-platform\u002Fpulp-dronet?style=flat-square\">\n\u003C\u002Fdiv>\n\n\u003C!-- Possible styles\nstyle=social\nstyle=for-the-badge\nstyle=flat-square\n -->\n\n\u003Ch1 align=\"center\">PULP-Dronet\u003C\u002Fh1>\n\n\u003Ch3 align=\"center\">\u003Ci>Judge me by my size, do you? 
<p align="center"> <i> Copyright (C) 2024 ETH Zürich, University of Bologna. All rights reserved. </i> </p>

<div align="center">
  <img src="https://oss.gittoolsai.com/images/pulp-platform_pulp-dronet_readme_daa15f5fe9bf.png" width="98%">
  <br><br>
  <img src="https://oss.gittoolsai.com/images/pulp-platform_pulp-dronet_readme_135970c67b25.gif" width="49%">
  <img src="https://oss.gittoolsai.com/images/pulp-platform_pulp-dronet_readme_7dc329c0eb52.gif" width="49%">
</div>

## **Videos**

* **PULP-Dronet v1:** [video1](https://youtu.be/57Vy5cSvnaA), [video2](https://youtu.be/JKY03NV3C2s).
* **PULP-Dronet v2:** [video1](https://youtu.be/41IwjAXmFQ0), [video2](https://youtu.be/Cd9GyTl6tHI).
* **PULP-Dronet v3:** [video](https://youtu.be/ehNlDyhsVSc)

*Subscribe to our [PULP Platform](https://www.youtube.com/c/PULPPlatform) YouTube channel!*

## **Citing**

If you use PULP-Dronet in an academic or industrial context, please cite the listed publications:

* *"Distilling Tiny and Ultra-fast Deep Neural Networks for Autonomous Navigation on Nano-UAVs"*, 2024 ([IEEE IoT Journal](https://ieeexplore.ieee.org/abstract/document/10606040), [arXiv](https://arxiv.org/abs/2407.12675#))
* *"Tiny-PULP-Dronets: Squeezing Neural Networks for Faster and Lighter Inference on Multi-Tasking Autonomous Nano-Drones"*, 2022 ([IEEE AICAS](https://ieeexplore.ieee.org/document/9869931), [arXiv](https://arxiv.org/abs/2407.02405))
* *"Improving Autonomous Nano-Drones Performance via Automated End-to-End Optimization and Deployment of DNNs"*, 2021 ([IEEE JETCAS](https://ieeexplore.ieee.org/document/9606685))
* *"Automated Tuning of End-to-end Neural FlightControllers for Autonomous Nano-drones"*, 2021 ([IEEE AICAS](https://ieeexplore.ieee.org/document/9458550))
* *"A 64mW DNN-based Visual Navigation Engine for Autonomous Nano-Drones"*, 2019 ([IEEE IoT Journal](https://ieeexplore.ieee.org/document/8715489), [arXiv preprint](https://arxiv.org/abs/1805.01831))
* *"An Open Source and Open Hardware Deep Learning-powered Visual Navigation Engine for Autonomous Nano-UAVs"*, 2019 ([IEEE DCOSS](https://ieeexplore.ieee.org/document/8804776), [arXiv preprint](https://arxiv.org/abs/1905.04166))

```bibtex
@article{lamberti2024pulpdronetIOTJ,
  author={Lamberti, Lorenzo and Bellone, Lorenzo and Macan, Luka and Natalizio, Enrico and Conti, Francesco and Palossi, Daniele and Benini, Luca},
  journal={IEEE Internet of Things Journal},
  title={Distilling Tiny and Ultra-fast Deep Neural Networks for Autonomous Navigation on Nano-UAVs},
  year={2024},
  volume={},
  number={},
  pages={1-1},
  keywords={Navigation;Task analysis;Artificial intelligence;Internet of Things;Autonomous robots;Throughput;Collision avoidance;Autonomous Nano-UAV;Embedded Devices;Ultra-low-power;Artificial Intelligence;Mobile and Ubiquitous Systems},
  doi={10.1109/JIOT.2024.3431913}
}
```

```bibtex
@INPROCEEDINGS{lamberti2022tinypulpdronetAICAS,
  author={Lamberti, Lorenzo and Niculescu, Vlad and Barciś, Michał and Bellone, Lorenzo and Natalizio, Enrico and Benini, Luca and Palossi, Daniele},
  booktitle={2022 IEEE 4th International Conference on Artificial Intelligence Circuits and Systems (AICAS)},
  title={Tiny-PULP-Dronets: Squeezing Neural Networks for Faster and Lighter Inference on Multi-Tasking Autonomous Nano-Drones},
  year={2022},
  volume={},
  number={},
  pages={287-290},
  doi={10.1109/AICAS54282.2022.9869931}
}
```

```bibtex
@ARTICLE{niculescu2021pulpdronetJETCAS,
  author={Niculescu, Vlad and Lamberti, Lorenzo and Conti, Francesco and Benini, Luca and Palossi, Daniele},
  journal={IEEE Journal on Emerging and Selected Topics in Circuits and Systems},
  title={Improving Autonomous Nano-drones Performance via Automated End-to-End Optimization and Deployment of DNNs},
  year={2021},
  volume={},
  number={},
  pages={1-1},
  doi={10.1109/JETCAS.2021.3126259}
}
```

```bibtex
@inproceedings{niculescu2021pulpdronetAICAS,
  author={V. {Niculescu} and L. {Lamberti} and D. {Palossi} and L. {Benini}},
  booktitle={2021 IEEE International Conference on Artificial Intelligence Circuits and Systems (AICAS)},
  title={Automated Tuning of End-to-end Neural FlightControllers for Autonomous Nano-drones},
  pages={},
  keywords={autonomous navigation, nano-size UAVs, deep learning, CNN, heterogeneous computing, parallel ultra-low power, bio-inspired},
  doi={},
  ISSN={},
  month={},
  year={2021},
}
```

```bibtex
@article{palossi2019pulpdronetIoTJ,
  author={D. {Palossi} and A. {Loquercio} and F. {Conti} and E. {Flamand} and D. {Scaramuzza} and L. {Benini}},
  title={A 64mW DNN-based Visual Navigation Engine for Autonomous Nano-Drones},
  journal={IEEE Internet of Things Journal},
  doi={10.1109/JIOT.2019.2917066},
  ISSN={2327-4662},
  year={2019}
}
```

```bibtex
@inproceedings{palossi2019pulpdronetDCOSS,
  author={D. {Palossi} and F. {Conti} and L. {Benini}},
  booktitle={2019 15th International Conference on Distributed Computing in Sensor Systems (DCOSS)},
  title={An Open Source and Open Hardware Deep Learning-Powered Visual Navigation Engine for Autonomous Nano-UAVs},
  pages={604-611},
  keywords={autonomous navigation, nano-size UAVs, deep learning, CNN, heterogeneous computing, parallel ultra-low power, bio-inspired},
  doi={10.1109/DCOSS.2019.00111},
  ISSN={2325-2944},
  month={May},
  year={2019},
}
```
## 1. Introduction

### What is PULP-Dronet?
**PULP-Dronet** is a deep learning-powered *visual navigation engine* that enables autonomous navigation of a pocket-size quadrotor in a previously unseen environment.
Thanks to PULP-Dronet, the nano-drone can explore the environment, avoiding collisions even with dynamic obstacles, in complete autonomy -- **no human operator, no ad-hoc external signals, and no remote laptop!**
This means that all the complex computations are done directly aboard the vehicle, and very fast.
The visual navigation engine is composed of both a software and a hardware part.

- **Software component:**
The software part is based on the earlier [DroNet](https://github.com/uzh-rpg/rpg_public_dronet) project developed by the [RPG](http://rpg.ifi.uzh.ch/) at the University of Zürich (UZH).
DroNet is a shallow convolutional neural network (CNN) that has been used to control a standard-size quadrotor in a set of environments via remote computation.

- **Hardware component:**
The hardware soul of PULP-Dronet is an ultra-low-power visual navigation module embodied by a pluggable PCB (called a *shield* or *deck*) for the [Crazyflie 2.0](https://www.bitcraze.io/crazyflie-2/)/[2.1](https://www.bitcraze.io/crazyflie-2-1/) nano-drone. The shield features a Parallel Ultra-Low-Power (PULP) GAP8 System-on-Chip (SoC) from GreenWaves Technologies (GWT), an ultra-low-power HiMax HBM01 camera, and off-chip Flash/DRAM memory. This pluggable PCB has evolved over time, from the [*PULP-Shield*](https://ieeexplore.ieee.org/document/8715489), the first custom-made prototype developed at ETH Zürich, to its commercial off-the-shelf evolution, the [*AI-deck*](https://store.bitcraze.io/products/ai-deck).
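The README does not reproduce the network itself; as a rough illustration of what a DroNet-style two-output CNN looks like in PyTorch (the framework used by v2/v3), here is a minimal sketch. The layer count, channel widths and the 200x200 grayscale input are assumptions for the example, not the actual PULP-Dronet topology.

```python
# Minimal sketch of a DroNet-style CNN with two heads: a steering-angle
# regression output and a collision-probability output. This is NOT the
# PULP-Dronet architecture; input size, channel widths and depth are invented
# purely for illustration.
import torch
import torch.nn as nn

class DronetStyleCNN(nn.Module):
    def __init__(self, in_channels: int = 1):
        super().__init__()
        # Small convolutional trunk: grayscale frame in, compact feature vector out.
        self.trunk = nn.Sequential(
            nn.Conv2d(in_channels, 8, kernel_size=5, stride=2, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2),
            nn.Conv2d(8, 16, kernel_size=3, stride=2, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(16, 32, kernel_size=3, stride=2, padding=1),
            nn.ReLU(inplace=True),
            nn.AdaptiveAvgPool2d(1),
            nn.Flatten(),
        )
        # Two task heads, mirroring DroNet's outputs.
        self.steering_head = nn.Linear(32, 1)   # regression: steering angle
        self.collision_head = nn.Linear(32, 1)  # classification: collision probability

    def forward(self, x: torch.Tensor):
        features = self.trunk(x)
        steering = self.steering_head(features)
        collision_prob = torch.sigmoid(self.collision_head(features))
        return steering, collision_prob

if __name__ == "__main__":
    # One fake 200x200 grayscale frame, just to show the two outputs.
    frame = torch.randn(1, 1, 200, 200)
    steer, coll = DronetStyleCNN()(frame)
    print(steer.shape, coll.shape)  # torch.Size([1, 1]) torch.Size([1, 1])
```

The two-head structure matters for what follows: the same labels (steering plus collision) are what the v3 dataset collects, and the trunk is what the quantization and deployment tools below have to fit into the GAP8's memory and compute budget.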
## Evolution of PULP-Dronet

<img style="float: center;" src="https://oss.gittoolsai.com/images/pulp-platform_pulp-dronet_readme_7e899f016664.png" width="100%">

### **PULP-Dronet v1:**
The first version of PULP-Dronet, which gave birth to the PULP-Shield: a lightweight, modular and configurable printed circuit board (PCB) with a highly optimized layout and a form factor compatible with the Crazyflie nano-sized quadrotor.
We developed a [general methodology](https://arxiv.org/abs/1805.01831) for deploying state-of-the-art deep learning algorithms on ultra-low-power embedded computation nodes, such as a miniaturized drone, and then we [automated the whole process](https://ieeexplore.ieee.org/document/9458550).
This methodology allowed us first to deploy DroNet on the _PULP-Shield_, and then to demonstrate that it enables the execution of the CNN on board the Crazyflie 2.0 within only 64-284mW and with a throughput of 6-18 frames per second!
Finally, we field-proved our methodology with a closed-loop, fully working demonstration of vision-driven autonomous navigation relying only on onboard resources, within an ultra-low-power budget.
See the videos on the [PULP Platform YouTube](https://www.youtube.com/channel/UCpad_lwSfoMZkb6X7FdjU0g) channel: [video](https://youtu.be/JKY03NV3C2s).

Summary of characteristics:

- **Hardware:** [*PULP-Shield*](https://ieeexplore.ieee.org/document/8715489)

- **Deep learning framework:** Tensorflow/Keras

- **Quantization**: fixed-point 16 bits, hand crafted

- **Deployment tool**: _AutoTiler_ (early release, developed in collaboration with GreenWaves Technologies)

We release here, as open source, all our code, hardware designs, datasets, and trained networks.

### **PULP-Dronet v2:**

This follow-up takes advantage of a new commercial off-the-shelf PCB design based on the PULP-Shield, now developed and distributed by Bitcraze: the AI-deck. Our work focused on automating the whole deployment process of a convolutional neural network, which previously required significant complexity reduction and fine-grained hand-tuning to be deployed successfully aboard a flying nano-drone.
We therefore introduce methodologies and software tools to streamline and automate all the deployment stages on a low-power commercial multicore SoC, investigating both academic (NEMO + DORY) and industrial (GAPflow by GreenWaves) tool-sets. Employing fixed-point 8-bit quantization, we reduced the memory footprint of PULP-Dronet v1 by 2x and achieved a 1.6x speedup in inference time compared to the original hand-crafted CNN, with the same prediction accuracy.
Our fully automated deployment methodology allowed us first to deploy DroNet on the AI-deck, and then to demonstrate that it enables the execution of the CNN on board the Crazyflie 2.1 within only 35-102mW and with a throughput of 9-17 frames/s!

Summary of characteristics:

- **Hardware:** [*AI-deck*](https://store.bitcraze.io/products/ai-deck)

- **Deep learning framework:** Pytorch

- **Quantization**: fixed-point 8 bits, fully automated (with both the academic [NEMO](https://github.com/pulp-platform/nemo) and the industrial [NNTool](https://greenwaves-technologies.com/sdk-manuals/nn_quick_start_guide/))

- **Deployment**: fully automated (with both the academic [DORY](https://github.com/pulp-platform/dory) and the industrial [AutoTiler](https://greenwaves-technologies.com/sdk-manuals/nn_quick_start_guide/))

We release here, as open source, all our code, hardware designs, and trained networks.
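To make the "fixed-point 8 bits" idea concrete, here is a small sketch of symmetric fixed-point quantization and dequantization of a weight tensor. It illustrates only the underlying arithmetic; it is not the NEMO/NNTool flow the project actually uses, and the power-of-two scale choice is an assumption of the example.

```python
# Symmetric fixed-point quantization sketch: map float weights to int8 using a
# power-of-two scale, to show what "fixed-point 8 bits" means numerically.
# This is NOT the NEMO/NNTool pipeline used by PULP-Dronet.
import math
import torch

def quantize_fixed_point(w: torch.Tensor, num_bits: int = 8):
    """Return (int8 tensor, number of fractional bits) for a symmetric range."""
    max_abs = w.abs().max().item()
    if max_abs == 0.0:
        return torch.zeros_like(w, dtype=torch.int8), num_bits - 1
    # Integer bits needed to represent max_abs; the remaining bits are fractional.
    int_bits = max(0, math.ceil(math.log2(max_abs)))
    frac_bits = num_bits - 1 - int_bits          # one bit is reserved for the sign
    scale = 2 ** frac_bits
    q = torch.clamp(torch.round(w * scale),
                    -(2 ** (num_bits - 1)), 2 ** (num_bits - 1) - 1)
    return q.to(torch.int8), frac_bits

def dequantize_fixed_point(q: torch.Tensor, frac_bits: int) -> torch.Tensor:
    return q.to(torch.float32) / (2 ** frac_bits)

if __name__ == "__main__":
    w = torch.randn(16) * 0.5
    q, frac_bits = quantize_fixed_point(w, num_bits=8)
    w_hat = dequantize_fixed_point(q, frac_bits)
    print("frac bits:", frac_bits, "max error:", (w - w_hat).abs().max().item())
```

The same idea, with 16 instead of 8 bits and hand-chosen scales, is what the v1 "hand crafted" quantization refers to; halving the bit width is where the 2x memory reduction quoted above comes from.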
### **PULP-Dronet v3:**

Achieving AI multi-tasking perception on a nano-UAV presents significant challenges.
The extremely limited payload of nano-UAVs restricts them to ultra-low-power microcontroller units with stringent computational and memory constraints, which has so far prevented the deployment of multiple AI tasks onboard.
We therefore focus on optimizing and minimizing the AI workloads without compromising the drone's behavior when stressed in real-world testing scenarios.
We achieve a speedup of 8.5x in inference time compared to PULP-Dronet v2, with an inference throughput of 139 frames/s.
Moreover, we develop a methodology for dataset collection on a nano-UAV. We collect unified collision-avoidance and steering information using only the nano-UAV's onboard resources, without depending on external infrastructure. The resulting PULP-Dronet v3 dataset consists of 66k labeled images.

We release all our open-source code here, including the PULP-Dronet v3 dataset, our dataset collection framework, and our trained networks.

Summary of characteristics:

- **Hardware:** [*AI-deck*](https://store.bitcraze.io/products/ai-deck)

- **Deep learning framework:** Pytorch

- **Quantization**: fixed-point 8 bits, fully automated with the academic [NEMO](https://github.com/pulp-platform/nemo).

- **Deployment**: fully automated with the academic [DORY](https://github.com/pulp-platform/dory).

- **[Dataset](https://zenodo.org/records/13348430)**: custom made, collected with the nano-drone.
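As an illustration of how such steering-plus-collision labels might be consumed in PyTorch, here is a hypothetical dataset wrapper. The CSV file name, column names ("image", "steering", "collision") and directory layout are invented for the example and do not describe the actual format of the Zenodo record.

```python
# Hypothetical loader for an image dataset labeled with steering angle and a
# collision flag. File layout and column names are assumptions for illustration;
# consult the Zenodo record for the real PULP-Dronet v3 format.
import csv
from pathlib import Path

import torch
from torch.utils.data import Dataset
from torchvision.io import read_image

class SteeringCollisionDataset(Dataset):
    def __init__(self, root: str, labels_csv: str = "labels.csv"):
        self.root = Path(root)
        # One row per frame: relative image path, steering angle, collision label.
        with open(self.root / labels_csv, newline="") as f:
            self.rows = list(csv.DictReader(f))

    def __len__(self) -> int:
        return len(self.rows)

    def __getitem__(self, idx: int):
        row = self.rows[idx]
        image = read_image(str(self.root / row["image"])).float() / 255.0
        steering = torch.tensor([float(row["steering"])])    # regression target
        collision = torch.tensor([float(row["collision"])])  # 0/1 label
        return image, steering, collision
```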
## License

All files in this repository are original and licensed under Apache-2.0. See [LICENSE](./LICENSE).

We release the dataset ([zenodo.org/records/13348430](https://zenodo.org/records/13348430)) as open source under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.

The licenses of external modules are described in [LICENSE_README.md](./LICENSE_README.md).
# PULP-Dronet quick-start guide

PULP-Dronet is a deep learning-based visual navigation engine designed for nano-drones such as the Crazyflie. It runs a convolutional neural network (CNN) on an ultra-low-power embedded chip (the GAP8 SoC) to achieve fully autonomous obstacle avoidance and navigation, with no external compute.

## 1. Environment setup

Before starting, make sure your development environment meets the following requirements.

### System requirements
- **Operating system**: Linux (Ubuntu 20.04 or 22.04 recommended). Windows users should use WSL2 or a virtual machine.
- **Hardware**:
  - Development host: an x86_64 PC.
  - Target device: a Crazyflie 2.0/2.1 drone plus an **AI-deck** (recommended for PULP-Dronet v2/v3) or a **PULP-Shield** (v1).

### Prerequisites
You need the GreenWaves Technologies SDK toolchain and the related Python dependencies.

```bash
# Update system packages
sudo apt-get update && sudo apt-get install -y git cmake build-essential python3 python3-pip python3-venv libusb-1.0-0-dev libftdi1-dev

# Clone the GAP SDK (GAP9/GAP8 shown here; check the project's latest docs for the exact version)
git clone https://github.com/GreenWaves-Technologies/gap_sdk.git
cd gap_sdk
source configs/ai_deck.sh  # pick the config matching your hardware; the AI-deck usually uses this one
make sdk

# Set environment variables (consider adding these to ~/.bashrc)
export GAP_SDK_HOME=$(pwd)
source configs/ai_deck.sh

# Install Python dependencies (run from the project root)
pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
pip3 install numpy opencv-python matplotlib
```

> **Tip**: developers in mainland China who find GitHub cloning slow can use a mirror, e.g. `git clone https://gitee.com/mirror/github/GreenWaves-Technologies/gap_sdk.git` (if such a synced mirror exists), or a proxy.

## 2. Installation

The project contains several versions (v1, v2, v3). The steps below use the current mainstream setup, **PULP-Dronet v2/v3 (AI-deck based)**, as an example.

```bash
# 1. Clone the repository
git clone https://github.com/pulp-platform/pulp-dronet.git
cd pulp-dronet

# 2. Initialize the submodules (pre-trained models and datasets)
git submodule update --init --recursive

# 3. Build the deployment tools (make sure the GAP_SDK environment is loaded)
# Enter the directory for your version, e.g. v2
cd v2_autotiler_toolchain

# 4. Build the project
make clean all
```

*Note: if you are using PULP-Dronet v1 (PULP-Shield), enter the `v1_handcrafted_toolchain` directory and follow its README for the Tensorflow/Keras-based build.*
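Because the target SoC has only a few hundred kilobytes of on-chip memory, it can be worth sanity-checking a candidate network's size before attempting deployment. The helper below is a rough sketch: the 512 KiB budget is an illustrative assumption rather than an official PULP-Dronet or GAP8 figure, and it only counts weights, not activations, which also consume on-chip memory.

```python
# Rough pre-deployment size check: count parameters and estimate the int8
# weight footprint. The 512 KiB budget is an illustrative assumption, not a
# project-provided value, and activation memory is ignored here.
import torch.nn as nn

def int8_weight_footprint_bytes(model: nn.Module) -> int:
    # After 8-bit fixed-point quantization, each weight occupies one byte.
    return sum(p.numel() for p in model.parameters())

def fits_budget(model: nn.Module, budget_bytes: int = 512 * 1024) -> bool:
    footprint = int8_weight_footprint_bytes(model)
    print(f"~{footprint / 1024:.1f} KiB of int8 weights vs. {budget_bytes / 1024:.0f} KiB budget")
    return footprint <= budget_bytes

if __name__ == "__main__":
    # Any nn.Module works here; a tiny stand-in model keeps the example self-contained.
    toy = nn.Sequential(nn.Conv2d(1, 8, 5), nn.ReLU(), nn.Conv2d(8, 16, 3), nn.Flatten())
    fits_budget(toy)
```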
## 3. Basic usage

The following example shows how to load a pre-trained model and run an inference test on the AI-deck.

### Step A: flash the firmware to the AI-deck

Make sure the Crazyflie is connected via USB or radio, then flash the compiled binary with `cfclient` or `make`.

```bash
# With the device connected, flash the compiled binary
make flash
```

### Step B: run the host-side control script

On the host, run the Python script that receives the drone's telemetry and sends control commands (keep the SDK environment active).

```bash
# Activate the virtual environment (if you created one)
# source venv/bin/activate

# Run a simple autonomous-flight test script
cd ../host_controller
python3 run_autonomous_flight.py --uri radio://0/80/2M
```

### Step C: check the output

- Watch the terminal output; you should see the live frame rate (FPS) and collision-avoidance commands.
- **Reference performance figures**:
  - **PULP-Dronet v2**: about 9-17 FPS at 35-102mW.
  - **PULP-Dronet v3**: about 139 FPS (multi-task optimized), an 8.5x speedup.

### Custom model training (optional)

To retrain the network on your own dataset (PyTorch-based):

```bash
cd training
python3 train_dronet.py --data_path ./my_dataset --epochs 50 --batch_size 32
```

After training, use the provided quantization script to convert the model to the 8-bit fixed-point format supported by GAP8, then rebuild and redeploy.

---
*For detailed parameter options, dataset downloads and hardware schematics, see the full documentation in the project root and the per-version subdirectories.*

## Use case

An agricultural research team needs pocket-size drones to fly autonomously inside a dense orchard canopy and collect pest and disease imagery.

### Without pulp-dronet
- **Expensive hardware dependency**: visual obstacle avoidance requires a heavy high-performance compute module such as an NVIDIA Jetson, cutting the nano-drone's flight time from 15 minutes to 3.
- **Communication-latency risk**: the "stream video to a ground station, process, send commands back" loop breaks easily when branches block the signal, leading to crashes.
- **Poor dynamic adaptation**: pre-planned fixed routes cannot cope with branches swaying in the wind or birds appearing suddenly; there is no real-time perception or decision-making.
- **High deployment cost**: every new orchard requires complex parameter tuning and path re-planning, making the approach hard to replicate at scale.

### With pulp-dronet
- **Ultra-low-power on-device operation**: pulp-dronet distills the deep learning model and deploys it on a PULP chip, enabling onboard real-time inference at around 64mW and restoring the drone's original endurance.
- **Fully autonomous closed-loop control**: the perception, decision and control loop runs entirely on the drone with no external link, so it can weave through dense branches even in signal dead zones.
- **Intelligent dynamic obstacle avoidance**: the deep neural network recognizes and avoids moving obstacles (such as swaying branches) in real time, significantly improving safety in unstructured environments.
- **Strong generalization**: thanks to its end-to-end optimized architecture, pulp-dronet adapts to orchards with different densities and lighting without extensive retraining, lowering the deployment barrier.

The core value of pulp-dronet is giving resource-constrained micro-drones the independent visual-navigation intelligence of much larger robots, delivering genuinely low-power, reliable autonomous flight.

## Repository metadata

- **Owner**: [pulp-platform](https://github.com/pulp-platform)
- **Stars / forks**: 592 / 170
- **Last commit**: 2026-04-03
- **License**: Apache-2.0
- **Languages**: C 65.8%, Python 31.8%, Makefile 1.7%, Shell 0.8%
- **Difficulty score**: 5
- **Category tags**: development framework, image, agent
- **GitHub topics**: autonomous-quadcoptor, cnn, artificial-intelligence, nano-uav, end-to-end-learning, closed-loop-control, deep-learning, pulp, riscv
- **Operating system**: not specified
- **GPU**: not required. The tool targets an embedded microcontroller (the GreenWaves GAP8 SoC), runs at ultra-low power, and performs all computation on board.
- **RAM**: not specified (the target device is a nano-drone with extremely tight memory limits; requirements for the development/training environment are not listed in the provided excerpts).
- **Dependency notes**: the project targets embedded hardware (a Crazyflie 2.0/2.1 drone with a PULP-Shield or AI-deck). The software side includes toolchains for training and automated deployment to the GAP8 chip: v1 uses hand-crafted 16-bit fixed-point quantization, while v2/v3 use automated 8-bit fixed-point quantization. Inference runs directly on the drone's microcontroller, with no external computer or remote link. Exact Python versions and library dependencies are documented in the individual code directories and install scripts rather than in the README overview.
- **Key dependencies**: Pytorch (v2/v3), Tensorflow/Keras (v1), AutoTiler / NNTool (GreenWaves deployment tools), DORY (academic deployment tool), NEMO (quantization tool)

## Releases

- **pulp-dronet-v3** (2025-03-10): https://ieeexplore.ieee.org/abstract/document/10606040
- **pulp-dronet-v2** (2024-08-21): freeze of the pulp-dronet-v2 tag from 2021