[{"data":1,"prerenderedAt":-1},["ShallowReactive",2],{"similar-roboflow--notebooks":3,"tool-roboflow--notebooks":61},[4,18,26,36,44,53],{"id":5,"name":6,"github_repo":7,"description_zh":8,"stars":9,"difficulty_score":10,"last_commit_at":11,"category_tags":12,"status":17},4358,"openclaw","openclaw\u002Fopenclaw","OpenClaw 是一款专为个人打造的本地化 AI 助手，旨在让你在自己的设备上拥有完全可控的智能伙伴。它打破了传统 AI 助手局限于特定网页或应用的束缚，能够直接接入你日常使用的各类通讯渠道，包括微信、WhatsApp、Telegram、Discord、iMessage 等数十种平台。无论你在哪个聊天软件中发送消息，OpenClaw 都能即时响应，甚至支持在 macOS、iOS 和 Android 设备上进行语音交互，并提供实时的画布渲染功能供你操控。\n\n这款工具主要解决了用户对数据隐私、响应速度以及“始终在线”体验的需求。通过将 AI 部署在本地，用户无需依赖云端服务即可享受快速、私密的智能辅助，真正实现了“你的数据，你做主”。其独特的技术亮点在于强大的网关架构，将控制平面与核心助手分离，确保跨平台通信的流畅性与扩展性。\n\nOpenClaw 非常适合希望构建个性化工作流的技术爱好者、开发者，以及注重隐私保护且不愿被单一生态绑定的普通用户。只要具备基础的终端操作能力（支持 macOS、Linux 及 Windows WSL2），即可通过简单的命令行引导完成部署。如果你渴望拥有一个懂你",349277,3,"2026-04-06T06:32:30",[13,14,15,16],"Agent","开发框架","图像","数据工具","ready",{"id":19,"name":20,"github_repo":21,"description_zh":22,"stars":23,"difficulty_score":10,"last_commit_at":24,"category_tags":25,"status":17},3808,"stable-diffusion-webui","AUTOMATIC1111\u002Fstable-diffusion-webui","stable-diffusion-webui 是一个基于 Gradio 构建的网页版操作界面，旨在让用户能够轻松地在本地运行和使用强大的 Stable Diffusion 图像生成模型。它解决了原始模型依赖命令行、操作门槛高且功能分散的痛点，将复杂的 AI 绘图流程整合进一个直观易用的图形化平台。\n\n无论是希望快速上手的普通创作者、需要精细控制画面细节的设计师，还是想要深入探索模型潜力的开发者与研究人员，都能从中获益。其核心亮点在于极高的功能丰富度：不仅支持文生图、图生图、局部重绘（Inpainting）和外绘（Outpainting）等基础模式，还独创了注意力机制调整、提示词矩阵、负向提示词以及“高清修复”等高级功能。此外，它内置了 GFPGAN 和 CodeFormer 等人脸修复工具，支持多种神经网络放大算法，并允许用户通过插件系统无限扩展能力。即使是显存有限的设备，stable-diffusion-webui 也提供了相应的优化选项，让高质量的 AI 艺术创作变得触手可及。",162132,"2026-04-05T11:01:52",[14,15,13],{"id":27,"name":28,"github_repo":29,"description_zh":30,"stars":31,"difficulty_score":32,"last_commit_at":33,"category_tags":34,"status":17},1381,"everything-claude-code","affaan-m\u002Feverything-claude-code","everything-claude-code 是一套专为 AI 编程助手（如 Claude Code、Codex、Cursor 等）打造的高性能优化系统。它不仅仅是一组配置文件，而是一个经过长期实战打磨的完整框架，旨在解决 AI 代理在实际开发中面临的效率低下、记忆丢失、安全隐患及缺乏持续学习能力等核心痛点。\n\n通过引入技能模块化、直觉增强、记忆持久化机制以及内置的安全扫描功能，everything-claude-code 能显著提升 AI 在复杂任务中的表现，帮助开发者构建更稳定、更智能的生产级 AI 代理。其独特的“研究优先”开发理念和针对 Token 消耗的优化策略，使得模型响应更快、成本更低，同时有效防御潜在的攻击向量。\n\n这套工具特别适合软件开发者、AI 研究人员以及希望深度定制 AI 工作流的技术团队使用。无论您是在构建大型代码库，还是需要 AI 协助进行安全审计与自动化测试，everything-claude-code 都能提供强大的底层支持。作为一个曾荣获 Anthropic 黑客大奖的开源项目，它融合了多语言支持与丰富的实战钩子（hooks），让 AI 真正成长为懂上",143909,2,"2026-04-07T11:33:18",[14,13,35],"语言模型",{"id":37,"name":38,"github_repo":39,"description_zh":40,"stars":41,"difficulty_score":32,"last_commit_at":42,"category_tags":43,"status":17},2271,"ComfyUI","Comfy-Org\u002FComfyUI","ComfyUI 是一款功能强大且高度模块化的视觉 AI 引擎，专为设计和执行复杂的 Stable Diffusion 图像生成流程而打造。它摒弃了传统的代码编写模式，采用直观的节点式流程图界面，让用户通过连接不同的功能模块即可构建个性化的生成管线。\n\n这一设计巧妙解决了高级 AI 绘图工作流配置复杂、灵活性不足的痛点。用户无需具备编程背景，也能自由组合模型、调整参数并实时预览效果，轻松实现从基础文生图到多步骤高清修复等各类复杂任务。ComfyUI 拥有极佳的兼容性，不仅支持 Windows、macOS 和 Linux 全平台，还广泛适配 NVIDIA、AMD、Intel 及苹果 Silicon 等多种硬件架构，并率先支持 SDXL、Flux、SD3 等前沿模型。\n\n无论是希望深入探索算法潜力的研究人员和开发者，还是追求极致创作自由度的设计师与资深 AI 绘画爱好者，ComfyUI 都能提供强大的支持。其独特的模块化架构允许社区不断扩展新功能，使其成为当前最灵活、生态最丰富的开源扩散模型工具之一，帮助用户将创意高效转化为现实。",107888,"2026-04-06T11:32:50",[14,15,13],{"id":45,"name":46,"github_repo":47,"description_zh":48,"stars":49,"difficulty_score":32,"last_commit_at":50,"category_tags":51,"status":17},4721,"markitdown","microsoft\u002Fmarkitdown","MarkItDown 是一款由微软 AutoGen 团队打造的轻量级 Python 工具，专为将各类文件高效转换为 Markdown 格式而设计。它支持 PDF、Word、Excel、PPT、图片（含 OCR）、音频（含语音转录）、HTML 乃至 YouTube 链接等多种格式的解析，能够精准提取文档中的标题、列表、表格和链接等关键结构信息。\n\n在人工智能应用日益普及的今天，大语言模型（LLM）虽擅长处理文本，却难以直接读取复杂的二进制办公文档。MarkItDown 恰好解决了这一痛点，它将非结构化或半结构化的文件转化为模型“原生理解”且 Token 效率极高的 Markdown 
### everything-claude-code · [affaan-m/everything-claude-code](https://github.com/affaan-m/everything-claude-code)

★ 143,909 · difficulty score 2 · last commit 2026-04-07T11:33:18 · tags: development frameworks, Agent, language models

everything-claude-code is a high-performance optimization system built for AI coding assistants such as Claude Code, Codex, and Cursor. More than a set of config files, it is a complete framework honed through long-term real-world use, targeting the core pain points AI agents face in day-to-day development: inefficiency, memory loss, security risks, and a lack of continuous learning.

Through modular skills, intuition boosting, persistent memory, and built-in security scanning, everything-claude-code markedly improves AI performance on complex tasks, helping developers build more stable, smarter production-grade agents. Its distinctive "research-first" development philosophy and token-spend optimizations make model responses faster and cheaper while defending against potential attack vectors.

The toolkit is especially suited to software developers, AI researchers, and teams that want deeply customized AI workflows. Whether you are building in a large codebase or need AI help with security audits and automated testing, everything-claude-code provides strong underlying support. An open-source project that won an Anthropic hackathon award, it combines multi-language support with a rich set of battle-tested hooks, letting the AI grow into a context-aware development partner.

### ComfyUI · [Comfy-Org/ComfyUI](https://github.com/Comfy-Org/ComfyUI)

★ 107,888 · difficulty score 2 · last commit 2026-04-06T11:32:50 · tags: development frameworks, image, Agent

ComfyUI is a powerful, highly modular visual AI engine built for designing and executing complex Stable Diffusion image-generation pipelines. It abandons the traditional code-writing approach for an intuitive node-graph interface in which users build personalized generation pipelines by wiring functional modules together.

This design neatly solves the pain points of configuring advanced AI image workflows: complexity and inflexibility. Users without a programming background can freely combine models, tune parameters, and preview results live, handling everything from basic text-to-image to multi-stage high-resolution refinement. ComfyUI is remarkably compatible: it runs on Windows, macOS, and Linux; supports NVIDIA, AMD, Intel, and Apple Silicon hardware; and was among the first to support frontier models such as SDXL, Flux, and SD3.

Whether you are a researcher or developer digging into algorithmic potential, or a designer or seasoned AI artist chasing maximum creative freedom, ComfyUI delivers. Its modular architecture lets the community keep extending it, making it one of the most flexible, ecosystem-rich open-source diffusion-model tools available today, turning ideas into images efficiently.
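Because a ComfyUI workflow is just a JSON node graph, it can also be queued programmatically. A minimal sketch, assuming a default local ComfyUI instance on port 8188 and the standard built-in node types (the checkpoint filename and prompt are placeholders):

```python
import json
import urllib.request

# A ComfyUI workflow is a graph: node-id -> {class_type, inputs}.
# Inputs reference other nodes' outputs as [node_id, output_index].
graph = {
    "1": {"class_type": "CheckpointLoaderSimple",
          "inputs": {"ckpt_name": "sd_xl_base_1.0.safetensors"}},  # placeholder file
    "2": {"class_type": "CLIPTextEncode",
          "inputs": {"clip": ["1", 1], "text": "a cabin in a snowy forest"}},
    "3": {"class_type": "CLIPTextEncode",
          "inputs": {"clip": ["1", 1], "text": "low quality"}},
    "4": {"class_type": "EmptyLatentImage",
          "inputs": {"width": 1024, "height": 1024, "batch_size": 1}},
    "5": {"class_type": "KSampler",
          "inputs": {"model": ["1", 0], "positive": ["2", 0], "negative": ["3", 0],
                     "latent_image": ["4", 0], "seed": 0, "steps": 20, "cfg": 7.0,
                     "sampler_name": "euler", "scheduler": "normal", "denoise": 1.0}},
    "6": {"class_type": "VAEDecode", "inputs": {"samples": ["5", 0], "vae": ["1", 2]}},
    "7": {"class_type": "SaveImage", "inputs": {"images": ["6", 0], "filename_prefix": "demo"}},
}

# Queue the graph on the default local ComfyUI endpoint.
req = urllib.request.Request(
    "http://127.0.0.1:8188/prompt",
    data=json.dumps({"prompt": graph}).encode(),
    headers={"Content-Type": "application/json"},
)
print(urllib.request.urlopen(req).read().decode())
```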
### markitdown · [microsoft/markitdown](https://github.com/microsoft/markitdown)

★ 93,400 · difficulty score 2 · last commit 2026-04-06T19:52:38 · tags: plugin, development frameworks

MarkItDown is a lightweight Python tool from Microsoft's AutoGen team, purpose-built to convert files of many kinds into Markdown efficiently. It can parse PDF, Word, Excel, PowerPoint, images (with OCR), audio (with speech transcription), HTML, and even YouTube links, accurately extracting key structural information such as headings, lists, tables, and links.

As AI applications spread, large language models (LLMs) handle text well but cannot read complex binary office documents directly. MarkItDown solves exactly this pain point: it turns unstructured or semi-structured files into Markdown that models understand "natively" and that is highly token-efficient, making it an ideal bridge between local files and AI analysis pipelines. It also provides an MCP (Model Context Protocol) server that integrates seamlessly with LLM applications such as Claude Desktop.

The tool is particularly suited to developers, data scientists, and AI researchers — especially those building retrieval-augmented generation (RAG) systems, running batch text analysis, or letting an AI assistant "read" local files directly. The output is reasonably human-readable, but its core strength is being optimized for machine consumption.
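The Python API is small; a minimal sketch of the documented usage (the file path is a placeholder):

```python
from markitdown import MarkItDown  # pip install "markitdown[all]"

md = MarkItDown()
# convert() accepts local paths: PDF, Word, Excel, PPT, images, audio, HTML...
result = md.convert("quarterly_report.pdf")  # placeholder path
print(result.text_content)  # Markdown ready for an LLM or RAG pipeline
```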
### LLMs-from-scratch · [rasbt/LLMs-from-scratch](https://github.com/rasbt/LLMs-from-scratch)

★ 90,106 · difficulty score 3 · last commit 2026-04-06T11:19:32 · tags: language models, image, Agent, development frameworks

LLMs-from-scratch is an open-source educational project based on PyTorch that walks you through building a ChatGPT-like large language model (LLM) from the ground up, step by step. It is the official code repository for the technical book of the same name, and it offers a complete hands-on path covering model development, pretraining, and finetuning.

The project tackles the "black box" problem in learning about large models: many developers can call ready-made models but struggle to understand their internal architecture and training mechanics. By writing every line of core code yourself, you gain a thorough grasp of the Transformer architecture, attention mechanisms, and other key principles, and come to understand how large models actually "think". The project also includes code for loading large pretrained weights for finetuning, extending the theory into practical use.

LLMs-from-scratch is ideal for AI developers, researchers, and computer-science students who want to go beneath the API surface. Its signature strength is its step-by-step pedagogy: it decomposes a complex systems-engineering effort into clear stages, with detailed diagrams and examples, making a small but fully functional LLM genuinely achievable. Whether you want to solidify theoretical foundations or prepare to build larger models, it is an excellent starting point.
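To give a flavor of the "every line yourself" approach, here is a compact sketch of the scaled dot-product self-attention at the heart of the Transformer chapters — not the repository's exact class, just the standard computation in PyTorch:

```python
import torch
import torch.nn as nn

class SelfAttention(nn.Module):
    """Single-head causal self-attention, the core building block of a GPT."""
    def __init__(self, d_in: int, d_out: int):
        super().__init__()
        self.W_q = nn.Linear(d_in, d_out, bias=False)
        self.W_k = nn.Linear(d_in, d_out, bias=False)
        self.W_v = nn.Linear(d_in, d_out, bias=False)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, seq_len, d_in)
        q, k, v = self.W_q(x), self.W_k(x), self.W_v(x)
        scores = q @ k.transpose(-2, -1) / k.shape[-1] ** 0.5
        # Causal mask: each token may only attend to itself and earlier tokens.
        seq_len = x.shape[1]
        mask = torch.triu(torch.ones(seq_len, seq_len, dtype=torch.bool), diagonal=1)
        scores = scores.masked_fill(mask, float("-inf"))
        return torch.softmax(scores, dim=-1) @ v

attn = SelfAttention(d_in=32, d_out=32)
out = attn(torch.randn(2, 8, 32))  # -> (2, 8, 32)
```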
## roboflow/notebooks

A collection of tutorials on state-of-the-art computer vision models and techniques. Explore everything from foundational architectures like ResNet to cutting-edge models like RF-DETR, YOLO11, SAM 3, and Qwen3-VL.

notebooks is an open-source collection of tutorials maintained by Roboflow, designed to help developers quickly master state-of-the-art computer vision techniques. It gathers dozens of hands-on Jupyter Notebook walkthroughs, covering everything from the classic ResNet architecture to the latest models such as YOLO11, SAM 3, RF-DETR, and the multimodal Qwen3-VL.

For developers who want to turn theory into practice, notebooks removes the "hard to get started" and "painful environment setup" obstacles. Instead of building an experimental environment from scratch or reverse-engineering model interfaces, users can run the code with one click on platforms such as Google Colab and Kaggle, learning how to carry out core tasks like object detection, image segmentation, pose estimation, OCR, and data extraction. Each tutorial ships runnable code, and most come with a video walkthrough and links to the relevant papers, forming a complete learning loop.

The resource is a natural fit for computer vision engineers, researchers, and students learning deep learning. Whether you want to quickly validate a new model or find an implementation recipe for a specific vision task, you will find clear guidance here. Its biggest strength is how fast it updates and how closely it tracks the community, ensuring users can adopt state-of-the-art (SOTA) models as soon as they land — an efficient on-ramp into computer vision.
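Many of the fine-tuning notebooks pull their training data through the `roboflow` Python package; the usual pattern looks roughly like this (API key, workspace, project, and version are placeholders):

```python
from roboflow import Roboflow  # pip install roboflow

rf = Roboflow(api_key="YOUR_API_KEY")  # placeholder key
project = rf.workspace("your-workspace").project("your-project")  # placeholders
dataset = project.version(1).download("yolov8")  # export format matches the model
print(dataset.location)  # local folder containing images and annotations
```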
![notebooks](https://oss.gittoolsai.com/images/roboflow_notebooks_readme_46bf7f4d3f30.png)

[notebooks](https://github.com/roboflow/notebooks) | [inference](https://github.com/roboflow/inference) | [autodistill](https://github.com/autodistill/autodistill) | [RF-DETR](https://github.com/roboflow/rf-detr)

[YouTube](https://youtube.com/roboflow) · [Roboflow](https://roboflow.com) · [LinkedIn](https://www.linkedin.com/company/roboflow-ai/) · [Docs](https://docs.roboflow.com) · [Forum](https://discuss.roboflow.com) · [Blog](https://blog.roboflow.com)

## 👋 hello

This repository offers a growing collection of computer vision tutorials. Learn to use SOTA models like YOLOv11, SAM 2, Florence-2, PaliGemma 2, and Qwen2.5-VL for tasks ranging from object detection, segmentation, and pose estimation to data extraction and OCR. Dive in and explore the exciting world of computer vision!
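The YOLO fine-tuning notebooks in the table below all follow the same basic shape; a condensed sketch using the `ultralytics` API (dataset path, model size, and image path are placeholders — the notebooks themselves add dataset download, evaluation, and deployment steps):

```python
from ultralytics import YOLO  # pip install ultralytics

# Start from pretrained weights; "yolo11n.pt" is the nano variant.
model = YOLO("yolo11n.pt")

# data.yaml points at train/val image folders and class names.
model.train(data="data.yaml", epochs=100, imgsz=640)

metrics = model.val()           # evaluate on the validation split
results = model("example.jpg")  # run inference; placeholder image path
```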
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-segmentation-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-segmentation-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Frf-detr-segmentation) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=boVmetUahes) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Frf-detr) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2511.09554-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2511.09554)|\n| [Zero-Shot Object Detection and Segmentation with Google Gemini 2.5](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-google-gamini-2-5.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-google-gamini-2-5.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-google-gamini-2-5.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fgemini-2-5-object-detection-segmentation\u002F)  |  [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2507.06261-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2507.06261)|\n| [Fine-Tune RF-DETR on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Frf-detr) 
[![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Flive\u002FyHW0ip-2i54) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Frf-detr) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2511.09554-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2511.09554)|\n| [Zero-Shot Object Detection and Segmentation with YOLOE](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-yoloe.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-yoloe.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-yoloe.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyoloe-zero-shot-object-detection-segmentation\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=eHAnIehnCt4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FTHU-MIG\u002Fyoloe) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2503.07465-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2503.07465)|\n| [Fine-Tune YOLOv12 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov12-object-detection-model.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov12-object-detection-model.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov12-object-detection-model.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov12-model)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fsunsmarterjie\u002Fyolov12) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2502.12524-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2502.12524)|\n| [Zero-Shot Object Detection with Qwen2.5-VL](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-qwen2-5-vl.ipynb) | 
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-qwen2-5-vl.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-qwen2-5-vl.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fqwen2-5-vl-zero-shot-object-detection\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=xEfh0IR8Fvo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FQwenLM\u002FQwen2.5-VL) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2502.13923-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2502.13923)|\n| [Fine-Tune Qwen2.5-VL for JSON Data Extraction](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-qwen2-5-vl-for-json-data-extraction.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-qwen2-5-vl-for-json-data-extraction.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-qwen2-5-vl-for-json-data-extraction.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=xEfh0IR8Fvo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FQwenLM\u002FQwen2.5-VL) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2502.13923-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2502.13923)|\n| [Fine-Tune PaliGemma2 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-paligemma-2\u002F)  | 
[![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2412.03555-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2412.03555)|\n| [Fine-Tune PaliGemma2 for JSON Data Extraction](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-for-json-data-extraction.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-for-json-data-extraction.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-for-json-data-extraction.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-paligemma-2\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2412.03555-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2412.03555)|\n| [Fine-Tune PaliGemma2 for LaTeX OCR](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-latex-ocr-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-latex-ocr-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-latex-ocr-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-paligemma-2\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2412.03555-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2412.03555)|\n| [Fine-Tune SAM-2.1](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffine-tune-sam-2.1.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffine-tune-sam-2.1.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffine-tune-sam-2.1.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-sam-2-1\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=QnCGcFHZy9s) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsam2) |\n| [Fine-Tune GPT-4o on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopenai-gpt-4o-fine-tuning.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopenai-gpt-4o-fine-tuning.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopenai-gpt-4o-fine-tuning.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fgpt-4o-object-detection\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=6Q6TieCBA4E) |  |\n| [Fine-Tune YOLO11 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov11-how-to-train-custom-data\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=jE_s4tVgPHA) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLO11 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-instance-segmentation-on-custom-dataset.ipynb) | 
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-instance-segmentation-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-instance-segmentation-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolo11-segmentation\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=jE_s4tVgPHA) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Segment Images with SAM2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-sam-2.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-sam-2.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-sam-2.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-segment-anything-2\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Flive\u002FDv003fTyO-Y) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything-2) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2408.00714-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2408.00714)|\n| [Segment Videos with SAM2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-sam-2.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-sam-2.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-sam-2.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-segment-anything-2\u002F) 
[![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Flive\u002FDv003fTyO-Y) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything-2) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2408.00714-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2408.00714)|\n| [Fine-Tune RT-DETR on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rt-detr-on-custom-dataset-with-transformers.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rt-detr-on-custom-dataset-with-transformers.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rt-detr-on-custom-dataset-with-transformers.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-rt-detr-custom-dataset-transformers\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Flyuwenyu\u002FRT-DETR) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2304.08069-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2304.08069)|\n| [Fine-Tune Florence-2 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-florence-2-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-florence-2-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-florence-2-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-florence-2-object-detection\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=i3KjYgxNH6w) |  [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2311.06242-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2311.06242)|\n| [Run Different Vision Tasks with Florence-2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-run-different-vision-tasks-with-florence-2.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-run-different-vision-tasks-with-florence-2.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-run-different-vision-tasks-with-florence-2.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fflorence-2\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=hj_ybcRdk5Y) |  [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2311.06242-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2311.06242)|\n| [Fine-Tune PaliGemma on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-fine-tune-paligemma\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=OMBmVInx68M) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2407.07726-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2407.07726)|\n| [Fine-Tune YOLOv10 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov10-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov10-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov10-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov10-how-to-train\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FTHU-MIG\u002Fyolov10) 
[![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2405.14458-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2405.14458)|\n| [Zero-Shot Object Detection with YOLO-World](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-yolo-world.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-yolo-world.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-yolo-world.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-yolo-world\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=X7gKBGVz4vs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FAILab-CVC\u002FYOLO-World) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2401.17270-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2401.17270)|\n| [Fine-Tune YOLOv9 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov9-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov9-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov9-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov9-model) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FXHT2c8jT3Bc) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002Fyolov9) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2402.13616-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2402.13616)|\n| [Fine-Tune RTMDet on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rtmdet-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rtmdet-object-detection-on-custom-data.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rtmdet-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-rtmdet-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002F5kgWyo6Sg4E) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fopen-mmlab\u002Fmmdetection) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2212.07784-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2212.07784)|\n| [Segment Images with FastSAM](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-fast-sam.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-fast-sam.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-fast-sam.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-use-fastsam) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FyHNPyqazYYU) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FCASIA-IVA-Lab\u002FFastSAM) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2306.12156-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2306.12156)|\n| [Fine-Tune YOLO-NAS on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo-nas-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo-nas-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo-nas-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolo-nas-how-to-train-on-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FV-H3eoPUnA8) | 
[![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FDeci-AI\u002Fsuper-gradients\u002Fblob\u002Fmaster\u002FYOLONAS.md) |\n| [Segment Images with Segment Anything Model (SAM)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-sam.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-sam.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-sam.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-use-segment-anything-model-sam) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FD-D6ZmadzPE) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2304.02643-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2304.02643)|\n| [Zero-Shot Object Detection with Grounding DINO](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-grounding-dino.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-grounding-dino.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-grounding-dino.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fgrounding-dino-zero-shot-object-detection) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FcMa77r3YrDk) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FIDEA-Research\u002FGroundingDINO) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2303.05499-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2303.05499)|\n| [Fine-Tune DETR Transformer on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-huggingface-detr-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-huggingface-detr-on-custom-dataset.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-huggingface-detr-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fyoutu.be\u002FAM8D4j9KoaU) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FAM8D4j9KoaU) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fdetr) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2005.12872-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2005.12872)|\n| [Classify Images with DINOv2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-classification.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-classification.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-classification.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-classify-images-with-dinov2\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fdinov2\u002F) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2304.07193-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2304.07193)|\n| [Fine-Tune YOLOv8 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov8-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FwuZtUMEiKWY) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Pose Estimation 
Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-keypoint.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-keypoint.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-keypoint.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-a-custom-yolov8-pose-estimation-model\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Oriented Bounding Boxes (OBB) Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-obb.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-obb.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-obb.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov8-obb-model\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-instance-segmentation-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-instance-segmentation-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-instance-segmentation-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov8-instance-segmentation\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FpFiGSrRtaU4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Classification 
Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-classification-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-classification-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-classification-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-a-yolov8-classification-model\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv7 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov7-custom-dataset-training-tutorial) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=5nsmXLyDaU4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002Fyolov7) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2207.02696-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2207.02696)|\n| [Fine-Tune YOLOv7 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-instance-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-instance-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-instance-segmentation-on-custom-data.ipynb)  | 
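The YOLOv8 rows above (detection, pose, OBB, instance segmentation, classification) all build on the `ultralytics` package, where fine-tuning is essentially a single call. A minimal sketch, assuming `pip install ultralytics` and a YOLOv8-format dataset with a `data.yaml`; all paths are placeholders:

```python
# Minimal YOLOv8 fine-tuning sketch with the ultralytics package.
# Assumes a YOLOv8-format dataset whose data.yaml lists train/val paths.
from ultralytics import YOLO

model = YOLO("yolov8n.pt")            # start from pretrained nano weights
model.train(data="data.yaml",          # placeholder dataset config
            epochs=50,
            imgsz=640)

metrics = model.val()                  # evaluate on the validation split
results = model.predict("example.jpg", conf=0.25)  # inference on one image
```

The same pattern covers the other YOLOv8 tasks by swapping the starting checkpoint (for example `yolov8n-seg.pt`, `yolov8n-pose.pt`, or `yolov8n-cls.pt`).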
[![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov7-instance-segmentation-on-custom-data) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=vFGxM2KLs10) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002Fyolov7) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2207.02696-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2207.02696)|\n| [Fine-Tune MT-YOLOv6 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov6-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=fFCWrMFH2UY) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fmeituan\u002FYOLOv6) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2209.02976-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2209.02976)|\n| [Fine-Tune YOLOv5 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov5-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=x0ThXHbtqCQ) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fyolov5) |\n| [Fine-Tune YOLOv5 on Classification 
Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-classification-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-classification-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-classification-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov5-classification-custom-data) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=DPjp9Kq4qn8) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fyolov5) |\n| [Fine-Tune YOLOv5 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-instance-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-instance-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-instance-segmentation-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov5-instance-segmentation-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=vKzfvtEtiYo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fyolov5) |\n| [Fine-Tune Faster RCNN on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-detectron2-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-detectron2-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-detectron2-segmentation-on-custom-data.ipynb)  | 
[![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-detectron2) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002Fe8LPflX0nwQ) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fdetectron2) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-1703.06870v3-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F1703.06870v3)|\n| [Fine-Tune SegFormer on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segformer-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segformer-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segformer-segmentation-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-segformer-on-a-custom-dataset-with-pytorch-lightning) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=4HNkBMfw-2o) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FNVlabs\u002FSegFormer) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2105.15203v3-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2105.15203v3)|\n| [Fine-Tune ViT on Classification Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-vision-transformer-classification-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-vision-transformer-classification-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-vision-transformer-classification-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-vision-transformer) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=8yRE2Pa-8_I) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Flucidrains\u002Fvit-pytorch) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2010.11929-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2010.11929)|\n| 
[Fine-Tune Scaled-YOLOv4 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-scaled-yolov4-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-scaled-yolov4-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-scaled-yolov4-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-scaled-yolov4) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=rEbpKxZbvIo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002FScaledYOLOv4) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2004.10934-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2004.10934)|\n| [Fine-Tune YOLOS on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolos-huggingface-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolos-huggingface-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolos-huggingface-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolos-transformer-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=N0V0xxSi6Xc) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fhuggingface\u002Ftransformers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2106.00666-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2106.00666)|\n| [Fine-Tune YOLOR on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolor-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolor-object-detection-on-custom-data.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolor-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolor-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=sZ5DiXDOHEM) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fyolor) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-1506.02640-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F1506.02640)|\n| [Fine-Tune YOLOX on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolox-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolox-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolox-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolox-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=q3RbFbaQQGw) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FMegvii-BaseDetection\u002FYOLOX) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2107.08430-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2107.08430)|\n| [Fine-Tune ResNet34 on Classification Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-resnet34-classification.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-resnet34-classification.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-resnet34-classification.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-a-custom-resnet34-model) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=93kXzUOiYY4) |  |\n| [Image Classification with OpenAI 
Clip](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-openai-clip-classification.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-openai-clip-classification.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-openai-clip-classification.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-use-openai-clip) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=8o701AEoZ8I) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fopenai\u002FCLIP) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2103.00020-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2103.00020)|\n| [Fine-Tune YOLOv4-tiny Darknet on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov4-tiny-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov4-tiny-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov4-tiny-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.ai\u002Ftrain-yolov4-tiny-on-custom-data-lighting-fast-detection) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=NTnZgLsk_DA) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fdarknet) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2011.04244-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2011.04244)|\n| [Train a YOLOv8 Classification Model with No Labeling](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-train-yolov8-classification-no-labeling.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-train-yolov8-classification-no-labeling.ipynb) 
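The Image Classification with OpenAI CLIP row above scores an image against arbitrary text labels with no training at all. A minimal zero-shot sketch, assuming `pip install git+https://github.com/openai/CLIP.git` plus torch; the labels and image path are placeholders:

```python
# Minimal zero-shot classification sketch with OpenAI CLIP.
import clip
import torch
from PIL import Image

device = "cuda" if torch.cuda.is_available() else "cpu"
model, preprocess = clip.load("ViT-B/32", device=device)

labels = ["a dog", "a cat", "a basketball player"]   # placeholder candidate labels
image = preprocess(Image.open("example.jpg")).unsqueeze(0).to(device)
text = clip.tokenize(labels).to(device)

with torch.no_grad():
    logits_per_image, _ = model(image, text)         # image-text similarity logits
    probs = logits_per_image.softmax(dim=-1).cpu().numpy()

print(dict(zip(labels, probs[0])))                   # probability per label
```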
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-train-yolov8-classification-no-labeling.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-classification-model-no-labeling\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n## 📍 tracker tutorials (3 notebooks)\n| **notebook** | **open in colab \u002F kaggle \u002F sagemaker studio lab** | **complementary materials** | **repository \u002F paper** |\n|:------------:|:-------------------------------------------------:|:---------------------------:|:----------------------:|\n| [How to Track Objects with RF-DETR and ByteTrack Tracker](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=u0k2dTZ0vfs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Ftrackers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2110.06864-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.06864)|\n| [How to Track Objects with RF-DETR and SORT Tracker](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-sort-tracker.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-sort-tracker.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-sort-tracker.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=u0k2dTZ0vfs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Ftrackers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-1602.00763-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F1602.00763)|\n| [How to Track Objects with RF-DETR and OC-SORT 
Tracker](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-ocsort-tracker.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-ocsort-tracker.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-ocsort-tracker.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=u0k2dTZ0vfs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Ftrackers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2206.14360-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2206.14360)|\n## 🛠️ computer vision skills (23 notebooks)\n| **notebook** | **open in colab \u002F kaggle \u002F sagemaker studio lab** | **complementary materials** | **repository \u002F paper** |\n|:------------:|:-------------------------------------------------:|:---------------------------:|:----------------------:|\n| [Basketball AI: Detect NBA 3 Second Violation](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-automatic-detection-of-3-second-violations.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-automatic-detection-of-3-second-violations.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-automatic-detection-of-3-second-violations.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fdetect-3-second-violation-ai-basketball)  |  |\n| [Basketball AI: How to Detect Track and Identify Basketball Players](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-how-to-detect-track-and-identify-basketball-players.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-how-to-detect-track-and-identify-basketball-players.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-how-to-detect-track-and-identify-basketball-players.ipynb)  | 
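The three tracker tutorials above all follow the same detect-then-track loop: run a detector per frame, then hand the detections to a tracker that assigns stable IDs. A minimal sketch of that pattern, shown here with supervision's ByteTrack implementation and a YOLOv8 detector (the notebooks themselves pair RF-DETR with the roboflow/trackers package); assumes `pip install ultralytics supervision`, and the video path is a placeholder:

```python
# Detect-then-track sketch: per-frame detections fed into ByteTrack for stable IDs.
import supervision as sv
from ultralytics import YOLO

model = YOLO("yolov8n.pt")
tracker = sv.ByteTrack()

for frame in sv.get_video_frames_generator("traffic.mp4"):  # placeholder video
    result = model(frame)[0]
    detections = sv.Detections.from_ultralytics(result)
    detections = tracker.update_with_detections(detections)
    print(detections.tracker_id)  # per-frame track IDs assigned by ByteTrack
```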
[![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fidentify-basketball-players) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=yGQb9KkvQ1Q) |  |\n| [Football AI](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffootball-ai.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffootball-ai.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffootball-ai.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fcamera-calibration-sports-computer-vision\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FaBVGKoNZQUw) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsports) |\n| [Auto-Annotate Dataset with GroundedSAM 2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fgrounded-sam-2-auto-label.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fgrounded-sam-2-auto-label.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fgrounded-sam-2-auto-label.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-segment-anything-2)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything-2) |\n| [Run YOLOv7 Object Detection with OpenVINO + TorchORT](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb)  | 
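The SAM notebook in the first table and the GroundedSAM 2 auto-annotation row above both lean on the Segment Anything family. A minimal sketch of whole-image mask generation with the original SAM release, assuming `pip install segment-anything` and a downloaded `sam_vit_h_4b8939.pth` checkpoint; the image path is a placeholder:

```python
# Generate masks for everything in an image with Segment Anything (SAM).
import cv2
from segment_anything import sam_model_registry, SamAutomaticMaskGenerator

sam = sam_model_registry["vit_h"](checkpoint="sam_vit_h_4b8939.pth")
mask_generator = SamAutomaticMaskGenerator(sam)

image = cv2.cvtColor(cv2.imread("example.jpg"), cv2.COLOR_BGR2RGB)
masks = mask_generator.generate(image)  # list of dicts: 'segmentation', 'area', 'bbox', ...
print(len(masks), "masks generated")
```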
[![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Faccelerate-pytorch-openvino-torch-ort)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fyolov7) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2207.02696-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2207.02696)|\n| [Estimate Vehicle Speed with YOLOv8](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-estimate-vehicle-speed-with-computer-vision.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-estimate-vehicle-speed-with-computer-vision.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-estimate-vehicle-speed-with-computer-vision.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Festimate-speed-computer-vision\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FuWP6UjDeZvY) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsupervision\u002Ftree\u002Fdevelop\u002Fexamples\u002Fspeed_estimation) |\n| [Detect and Count Objects in Polygon Zone with YOLOv5 \u002F YOLOv8 \u002F Detectron2 + Supervision](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-detect-and-count-objects-in-polygon-zone.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-detect-and-count-objects-in-polygon-zone.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-detect-and-count-objects-in-polygon-zone.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-count-objects-in-a-zone) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002Fl_kf9CfZ_8M) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsupervision) |\n| [Track and Count Vehicles with YOLOv8 + ByteTRACK + Supervision](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-and-count-vehicles-with-yolov8.ipynb) | 
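The polygon-zone row above combines a detector with supervision's zone primitives to count only the objects inside a region of interest. A minimal counting sketch, assuming `pip install ultralytics supervision`; the polygon coordinates and image path are placeholders, and note that older supervision releases also required a `frame_resolution_wh` argument on `PolygonZone`:

```python
# Count detections that fall inside a polygon zone with supervision.
import numpy as np
import supervision as sv
from ultralytics import YOLO

polygon = np.array([[100, 100], [500, 100], [500, 400], [100, 400]])  # placeholder zone
zone = sv.PolygonZone(polygon=polygon)

model = YOLO("yolov8n.pt")
result = model("example.jpg")[0]
detections = sv.Detections.from_ultralytics(result)

in_zone = zone.trigger(detections=detections)  # boolean mask, one entry per detection
print(int(in_zone.sum()), "objects in zone")   # zone.current_count holds the same total
```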
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-and-count-vehicles-with-yolov8.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-and-count-vehicles-with-yolov8.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov8-tracking-and-counting\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FOS5qI9YBkfk) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsupervision) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2110.06864-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.06864)|\n| [Football Players Tracking with YOLOv5 + ByteTRACK](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-football-players.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-football-players.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-football-players.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrack-football-players) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FQCG8QMhga9k) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fifzhang\u002FByteTrack) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2110.06864-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.06864)|\n| [Auto Train YOLOv8 Model with Autodistill](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-auto-train-yolov8-model-with-autodistill.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-auto-train-yolov8-model-with-autodistill.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-auto-train-yolov8-model-with-autodistill.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fautodistill) 
[![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FgKTYMfwPo4M) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fautodistill\u002Fautodistill) |\n| [Image Embeddings Analysis - Part 1](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage_embeddings_analysis_part_1.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage_embeddings_analysis_part_1.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage_embeddings_analysis_part_1.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FYxJkE6FvGF4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fopenai\u002FCLIP) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2103.00020-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2103.00020)|\n| [Automated Dataset Annotation and Evaluation with Grounding DINO and SAM](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino-and-sam.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino-and-sam.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino-and-sam.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fenhance-image-annotation-with-grounding-dino-and-sam\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FoEQYStnF2l8) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FIDEA-Research\u002FGroundingDINO) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2303.05499-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2303.05499)|\n| [Automated Dataset Annotation and Evaluation with Grounding DINO](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino.ipynb) 
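The Auto Train YOLOv8 Model with Autodistill row above distills a large base model into a small target model without hand labeling: a prompt-driven base model auto-labels raw images, and the target model trains on the result. A sketch of that pattern, assuming the `autodistill`, `autodistill-grounded-sam`, and `autodistill-yolov8` packages; the prompt, class name, and paths are placeholders:

```python
# Autodistill sketch: auto-label with a big base model, then train a small target model.
from autodistill.detection import CaptionOntology
from autodistill_grounded_sam import GroundedSAM
from autodistill_yolov8 import YOLOv8

# map a text prompt (what the base model sees) to a class name (what gets saved)
ontology = CaptionOntology({"milk bottle": "bottle"})

base_model = GroundedSAM(ontology=ontology)
base_model.label(input_folder="./images", output_folder="./dataset")

target_model = YOLOv8("yolov8n.pt")
target_model.train("./dataset/data.yaml", epochs=50)
```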
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FC4NqaRBz_Kw) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FIDEA-Research\u002FGroundingDINO) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2303.05499-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2303.05499)|\n| [Roboflow Video Inference with Custom Annotators](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Froboflow_video_inference_with_custom_annotators.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Froboflow_video_inference_with_custom_annotators.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Froboflow_video_inference_with_custom_annotators.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fcustom-annotator-video-inference)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Finference) |\n| [DINO-GPT-4V Object Detection](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdino-gpt4v-autodistill.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdino-gpt4v-autodistill.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdino-gpt4v-autodistill.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fdino-gpt-4v\u002F)  |  |\n| [Train a Segmentation Model with No Labeling](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segmentation-model-with-no-labeling.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segmentation-model-with-no-labeling.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segmentation-model-with-no-labeling.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-a-segmentation-model-no-labeling\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fautodistill\u002Fautodistill) |\n| [DINOv2 Image Retrieval](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-image-retrieval.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-image-retrieval.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-image-retrieval.ipynb)  |   | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fdinov2\u002F) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2304.07193-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2304.07193)|\n| [Vector Analysis with Scikit-learn and Bokeh](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fvector-analysis-with-sklearn-and-bokeh.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fvector-analysis-with-sklearn-and-bokeh.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fvector-analysis-with-sklearn-and-bokeh.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fvector-analysis)  |  |\n| [RF100 Object Detection Model Benchmarking](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-rf100.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-rf100.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-rf100.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Froboflow-100) 
[![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FjIgZMr-PBMo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Froboflow-100-benchmark) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2211.13523-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2211.13523)|\n| [Create Segmentation Masks with Roboflow](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-generate-segmentation-mask-with-roboflow.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-generate-segmentation-mask-with-roboflow.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-generate-segmentation-mask-with-roboflow.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-create-segmentation-masks-with-roboflow)  |  |\n| [How to Use PolygonZone and Roboflow Supervision](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-polygonzone-annotate-and-supervision.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-polygonzone-annotate-and-supervision.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-polygonzone-annotate-and-supervision.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fpolygonzone\u002F)  |  |\n| [Train a Package Detector With Two Labeled Images](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-package-detector-two-labeled-images.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-package-detector-two-labeled-images.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-package-detector-two-labeled-images.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fpackage-detector\u002F)  | 
[![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fautodistill\u002Fautodistill-seggpt) |\n| [Image-to-Image Search with CLIP and faiss](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage-to-image-search-clip-faiss.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage-to-image-search-clip-faiss.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage-to-image-search-clip-faiss.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fclip-image-search-faiss\u002F)  |  |\n\u003C!--- AUTOGENERATED-NOTEBOOKS-TABLE -->\n\n## 🎬 videos\n\nAlmost every week we create tutorials showing you the hottest models in Computer Vision. 🔥\n[Subscribe](https:\u002F\u002Fwww.youtube.com\u002F@Roboflow), and stay up to date with our latest YouTube videos!\n\n\u003Cp align=\"left\">\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FCilXrt3S-ws\" title=\"How to Choose the Best Computer Vision Model for Your Project\">\u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_20ae1a707d43.png\" alt=\"How to Choose the Best Computer Vision Model for Your Project\" width=\"300px\" align=\"left\" \u002F>\u003C\u002Fa>\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FCilXrt3S-ws\" title=\"How to Choose the Best Computer Vision Model for Your Project\">\u003Cstrong>How to Choose the Best Computer Vision Model for Your Project\u003C\u002Fstrong>\u003C\u002Fa>\n\u003Cdiv>\u003Cstrong>Created: 26 May 2023\u003C\u002Fstrong> | \u003Cstrong>Updated: 26 May 2023\u003C\u002Fstrong>\u003C\u002Fdiv>\n\u003Cbr\u002F> In this video, we will dive into the complexity of choosing the right computer vision model for your unique project. From the importance of high-quality datasets to hardware considerations, interoperability, benchmarking, and licensing issues, this video covers it all... \u003C\u002Fp> \u003Cbr\u002F>\n\n\u003Cp align=\"left\">\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FoEQYStnF2l8\" title=\"Accelerate Image Annotation with SAM and Grounding DINO\">\u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_c1f71e4546cf.png\" alt=\"Accelerate Image Annotation with SAM and Grounding DINO\" width=\"300px\" align=\"left\" \u002F>\u003C\u002Fa>\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FoEQYStnF2l8\" title=\"Accelerate Image Annotation with SAM and Grounding DINO\">\u003Cstrong>Accelerate Image Annotation with SAM and Grounding DINO\u003C\u002Fstrong>\u003C\u002Fa>\n\u003Cdiv>\u003Cstrong>Created: 20 Apr 2023\u003C\u002Fstrong> | \u003Cstrong>Updated: 20 Apr 2023\u003C\u002Fstrong>\u003C\u002Fdiv>\n\u003Cbr\u002F> Discover how to speed up your image annotation process using Grounding DINO and Segment Anything Model (SAM). 
Learn how to convert object detection datasets into instance segmentation datasets, and see the potential of using these models to automatically annotate your datasets for real-time detectors like YOLOv8... \u003C\u002Fp> \u003Cbr\u002F>\n\u003Cp align=\"left\">\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FD-D6ZmadzPE\" title=\"SAM - Segment Anything Model by Meta AI: Complete Guide\">\u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_edd70f5e3ac5.png\" alt=\"SAM - Segment Anything Model by Meta AI: Complete Guide\" width=\"300px\" align=\"left\" \u002F>\u003C\u002Fa>\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FD-D6ZmadzPE\" title=\"SAM - Segment Anything Model by Meta AI: Complete Guide\">\u003Cstrong>SAM - Segment Anything Model by Meta AI: Complete Guide\u003C\u002Fstrong>\u003C\u002Fa>\n\u003Cdiv>\u003Cstrong>Created: 11 Apr 2023\u003C\u002Fstrong> | \u003Cstrong>Updated: 11 Apr 2023\u003C\u002Fstrong>\u003C\u002Fdiv>\n\n\u003Cbr\u002F> Discover the incredible potential of Meta AI's Segment Anything Model (SAM)! We dive into SAM, an efficient and promptable model for image segmentation, which has revolutionized computer vision tasks. Trained on over 1 billion masks spanning 11M licensed and privacy-respecting images, SAM delivers zero-shot performance that is often superior to prior fully supervised results... \u003C\u002Fp>\n\n## 💻 run locally\n\nWe try to make it as easy as possible to run Roboflow Notebooks in Colab and Kaggle, but if you still want to run them\nlocally, below you will find instructions on how to do it. Remember not to install your dependencies globally; use\n[venv](https:\u002F\u002Fpackaging.python.org\u002Fen\u002Flatest\u002Fguides\u002Finstalling-using-pip-and-virtual-environments\u002F).\n\n```console\n# clone repository and navigate to root directory\ngit clone git@github.com:roboflow-ai\u002Fnotebooks.git\ncd notebooks\n\n# setup python environment and activate it\npython3 -m venv venv\nsource venv\u002Fbin\u002Factivate\n\n# install and run jupyter notebook\npip install notebook\njupyter notebook\n```\n\n## ☁️ run in sagemaker studio lab\n\nYou can now open our tutorial notebooks in [Amazon SageMaker Studio Lab](https:\u002F\u002Faws.amazon.com\u002Fsagemaker\u002Fstudio-lab\u002F) -\na free machine learning development environment that provides the compute, storage, and security, all at no cost, for\nanyone to learn and experiment with ML.\n\n| Stable Diffusion Image Generation | YOLOv5 Custom Dataset Training | YOLOv7 Custom Dataset Training |\n|:---------------------------------:|:------------------------------:|:------------------------------:|\n|  [![SageMaker](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Fsage-maker.svg)](https:\u002F\u002Fstudiolab.sagemaker.aws\u002Fimport\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fsagemaker-studiolab\u002Fstable-diffusion-image-generation.ipynb) | [![SageMaker](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Fsage-maker.svg)](https:\u002F\u002Fstudiolab.sagemaker.aws\u002Fimport\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fsagemaker-studiolab\u002Fyolov5-custom-training.ipynb)       
|[![SageMaker](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Fsage-maker.svg)](https:\u002F\u002Fstudiolab.sagemaker.aws\u002Fimport\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fsagemaker-studiolab\u002Fyolov7-custom-training.ipynb)       |\n\n\n## 🐞 bugs & 🦸 contribution\n\nComputer Vision moves fast! Sometimes our notebooks lag a tad behind the fast-moving\nlibraries they build on. If you notice that any of the notebooks are not working properly, create a\n[bug report](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fissues\u002Fnew?assignees=&labels=bug%2Ctriage&template=bug-report.yml)\nand let us know.\n\nIf you have an idea for a new tutorial we should do, create a\n[feature request](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fissues\u002Fnew?assignees=&labels=enhancement&template=feature-request.yml).\nWe are constantly looking for new ideas. If you feel up to the task and want to create a tutorial yourself, please take\na peek at our [contribution guide](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002FCONTRIBUTING.md). There you can\nfind all the information you need.\n\nWe are here for you, so don't hesitate to [reach out](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fdiscussions).\n","\u003Cdiv align=\"center\">\n  \u003Cp>\n    \u003Ca align=\"center\" href=\"\" target=\"_blank\">\n      \u003Cimg\n        width=\"850\"\n        src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_46bf7f4d3f30.png\"\n      >\n    \u003C\u002Fa>\n  \u003C\u002Fp>\n  \u003Cbr>\n\n  [notebooks](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fnotebooks) | [inference](https:\u002F\u002Fgithub.com\u002Froboflow\u002Finference) | [autodistill](https:\u002F\u002Fgithub.com\u002Fautodistill\u002Fautodistill) | [RF-DETR](https:\u002F\u002Fgithub.com\u002Froboflow\u002Frf-detr)\n\n  \u003Cbr>\n\n  \u003Cdiv align=\"center\">\n      \u003Ca href=\"https:\u002F\u002Fyoutube.com\u002Froboflow\">\n          \u003Cimg\n            src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_4a6072846419.png\"\n            width=\"3%\"\n          \u002F>\n      \u003C\u002Fa>\n      \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_896994f79d98.png\" width=\"3%\"\u002F>\n      \u003Ca href=\"https:\u002F\u002Froboflow.com\">\n          \u003Cimg\n            src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_4609eef41cf4.png\"\n            width=\"3%\"\n          \u002F>\n      \u003C\u002Fa>\n      \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_896994f79d98.png\" width=\"3%\"\u002F>\n      \u003Ca href=\"https:\u002F\u002Fwww.linkedin.com\u002Fcompany\u002Froboflow-ai\u002F\">\n          \u003Cimg\n            src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_c2b5b316fb59.png\"\n            width=\"3%\"\n          \u002F>\n      \u003C\u002Fa>\n      \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_896994f79d98.png\" width=\"3%\"\u002F>\n      \u003Ca href=\"https:\u002F\u002Fdocs.roboflow.com\">\n          \u003Cimg\n            src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_b11ef9eeaca7.png\"\n            
width=\"3%\"\n          \u002F>\n      \u003C\u002Fa>\n      \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_896994f79d98.png\" width=\"3%\"\u002F>\n      \u003Ca href=\"https:\u002F\u002Fdiscuss.roboflow.com\">\n          \u003Cimg\n            src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_171d8d7d7d7b.png\"\n            width=\"3%\"\n          \u002F>\n      \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_896994f79d98.png\" width=\"3%\"\u002F>\n      \u003Ca href=\"https:\u002F\u002Fblog.roboflow.com\">\n          \u003Cimg\n            src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_aace967bbd3c.png\"\n            width=\"3%\"\n          \u002F>\n      \u003C\u002Fa>\n      \u003C\u002Fa>\n  \u003C\u002Fdiv>\n\n\u003C\u002Fdiv>\n\n## 👋 你好\n\n本仓库提供不断增长的计算机视觉教程集合。学习如何使用 SOTA 模型，如 YOLOv11、SAM 2、Florence-2、PaliGemma 2 和 Qwen2.5-VL，完成从目标检测、分割和姿态估计到数据提取和 OCR 等任务。快来探索激动人心的计算机视觉世界吧！\n\n\u003C!--- AUTOGENERATED-NOTEBOOKS-TABLE -->\n\u003C!---\n   警告：请勿手动编辑此表格。它将自动更新。\n   如需了解如何正确地进行更改，请参阅 CONTRIBUTING.MD 文件。\n-->\n\n## 🚀 model tutorials (59 notebooks)\n| **notebook** | **open in colab \u002F kaggle \u002F sagemaker studio lab** | **complementary materials** | **repository \u002F paper** |\n|:------------:|:-------------------------------------------------:|:---------------------------:|:----------------------:|\n| [How to Perform OCR with GLM-OCR](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-perform-ocr-with-glm-ocr.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-perform-ocr-with-glm-ocr.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-perform-ocr-with-glm-ocr.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=_KCXD8vFIYM) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fzai-org\u002FGLM-OCR) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2603.10910-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2603.10910)|\n| [How to Track Objects with RF-DETR and ByteTrack Tracker](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb)  |  
[![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=u0k2dTZ0vfs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Ftrackers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2110.06864-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.06864)|\n| [Fine-Tune YOLO26 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo26-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo26-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo26-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolo26-custom-data\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=sczVknSvzh4a) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLO26 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo26-instance-segmentation-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo26-instance-segmentation-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo26-instance-segmentation-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolo26-instance-segmentation-custom-data\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=sczVknSvzh4a) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| Segment Images with SAM3 | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-segment-anything-3.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-segment-anything-3.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fsam3\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=G1AEuFwQrWU) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsam3) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2511.16719-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2511.16719)|\n| Segment Videos with SAM3 | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-segment-anything-3.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-segment-anything-3.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fsam3\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=G1AEuFwQrWU) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsam3) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2511.16719-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2511.16719)|\n| [Open Vocabulary Object Detection with Qwen3-VL](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopen-vocabulary-object-detection-with-qwen3-vl.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopen-vocabulary-object-detection-with-qwen3-vl.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopen-vocabulary-object-detection-with-qwen3-vl.ipynb)  |   | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FQwenLM\u002FQwen3-VL) |\n| [Fine-Tune RF-DETR Segmentation on Custom Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-segmentation-dataset.ipynb) | 
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-segmentation-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-segmentation-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Frf-detr-segmentation) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=boVmetUahes) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Frf-detr) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2511.09554-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2511.09554)|\n| [Zero-Shot Object Detection and Segmentation with Google Gemini 2.5](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-google-gamini-2-5.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-google-gamini-2-5.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-google-gamini-2-5.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fgemini-2-5-object-detection-segmentation\u002F)  |  [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2507.06261-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2507.06261)|\n| [Fine-Tune RF-DETR on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-rf-detr-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Frf-detr) 
[![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Flive\u002FyHW0ip-2i54) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Frf-detr) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2511.09554-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2511.09554)|\n| [Zero-Shot Object Detection and Segmentation with YOLOE](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-yoloe.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-yoloe.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-and-segmentation-with-yoloe.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyoloe-zero-shot-object-detection-segmentation\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=eHAnIehnCt4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FTHU-MIG\u002Fyoloe) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2503.07465-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2503.07465)|\n| [Fine-Tune YOLOv12 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov12-object-detection-model.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov12-object-detection-model.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov12-object-detection-model.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov12-model)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fsunsmarterjie\u002Fyolov12) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2502.12524-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2502.12524)|\n| [Zero-Shot Object Detection with Qwen2.5-VL](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-qwen2-5-vl.ipynb) | 
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-qwen2-5-vl.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-qwen2-5-vl.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fqwen2-5-vl-zero-shot-object-detection\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=xEfh0IR8Fvo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FQwenLM\u002FQwen2.5-VL) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2502.13923-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2502.13923)|\n| [Fine-Tune Qwen2.5-VL for JSON Data Extraction](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-qwen2-5-vl-for-json-data-extraction.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-qwen2-5-vl-for-json-data-extraction.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-qwen2-5-vl-for-json-data-extraction.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=xEfh0IR8Fvo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FQwenLM\u002FQwen2.5-VL) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2502.13923-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2502.13923)|\n| [Fine-Tune PaliGemma2 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-paligemma-2\u002F)  | 
[![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2412.03555-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2412.03555)|\n| [Fine-Tune PaliGemma2 for JSON Data Extraction](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-for-json-data-extraction.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-for-json-data-extraction.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-for-json-data-extraction.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-paligemma-2\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2412.03555-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2412.03555)|\n| [Fine-Tune PaliGemma2 for LaTeX OCR](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-latex-ocr-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-latex-ocr-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma2-on-latex-ocr-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-paligemma-2\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2412.03555-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2412.03555)|\n| [Fine-Tune SAM-2.1](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffine-tune-sam-2.1.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffine-tune-sam-2.1.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffine-tune-sam-2.1.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-sam-2-1\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=QnCGcFHZy9s) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsam2) |\n| [Fine-Tune GPT-4o on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopenai-gpt-4o-fine-tuning.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopenai-gpt-4o-fine-tuning.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fopenai-gpt-4o-fine-tuning.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fgpt-4o-object-detection\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=6Q6TieCBA4E) |  |\n| [Fine-Tune YOLO11 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov11-how-to-train-custom-data\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=jE_s4tVgPHA) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLO11 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-instance-segmentation-on-custom-dataset.ipynb) | 
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-instance-segmentation-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo11-instance-segmentation-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolo11-segmentation\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=jE_s4tVgPHA) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Segment Images with SAM2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-sam-2.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-sam-2.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-images-with-sam-2.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-segment-anything-2\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Flive\u002FDv003fTyO-Y) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything-2) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2408.00714-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2408.00714)|\n| [Segment Videos with SAM2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-sam-2.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-sam-2.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-videos-with-sam-2.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-segment-anything-2\u002F) 
[![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Flive\u002FDv003fTyO-Y) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything-2) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2408.00714-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2408.00714)|\n| [Fine-Tune RT-DETR on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rt-detr-on-custom-dataset-with-transformers.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rt-detr-on-custom-dataset-with-transformers.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rt-detr-on-custom-dataset-with-transformers.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-rt-detr-custom-dataset-transformers\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Flyuwenyu\u002FRT-DETR) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2304.08069-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2304.08069)|\n| [Fine-Tune Florence-2 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-florence-2-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-florence-2-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-florence-2-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ffine-tune-florence-2-object-detection\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=i3KjYgxNH6w) |  [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2311.06242-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2311.06242)|\n| [Run Different Vision Tasks with Florence-2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-run-different-vision-tasks-with-florence-2.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-run-different-vision-tasks-with-florence-2.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-run-different-vision-tasks-with-florence-2.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fflorence-2\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=hj_ybcRdk5Y) |  [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2311.06242-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2311.06242)|\n| [Fine-Tune PaliGemma on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma-on-detection-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma-on-detection-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-finetune-paligemma-on-detection-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-fine-tune-paligemma\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=OMBmVInx68M) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fgoogle-research\u002Fbig_vision\u002Fblob\u002Fmain\u002Fbig_vision\u002Fconfigs\u002Fproj\u002Fpaligemma\u002FREADME.md) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2407.07726-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2407.07726)|\n| [Fine-Tune YOLOv10 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov10-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov10-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov10-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov10-how-to-train\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FTHU-MIG\u002Fyolov10) 
[![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2405.14458-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2405.14458)|\n| [Zero-Shot Object Detection with YOLO-World](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-yolo-world.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-yolo-world.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-yolo-world.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-yolo-world\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=X7gKBGVz4vs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FAILab-CVC\u002FYOLO-World) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2401.17270-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2401.17270)|\n| [Fine-Tune YOLOv9 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov9-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov9-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov9-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov9-model) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FXHT2c8jT3Bc) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002Fyolov9) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2402.13616-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2402.13616)|\n| [Fine-Tune RTMDet on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rtmdet-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rtmdet-object-detection-on-custom-data.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-rtmdet-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-rtmdet-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002F5kgWyo6Sg4E) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fopen-mmlab\u002Fmmdetection) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2212.07784-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2212.07784)|\n| [Segment Images with FastSAM](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-fast-sam.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-fast-sam.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-fast-sam.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-use-fastsam) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FyHNPyqazYYU) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FCASIA-IVA-Lab\u002FFastSAM) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2306.12156-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2306.12156)|\n| [Fine-Tune YOLO-NAS on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo-nas-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo-nas-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo-nas-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolo-nas-how-to-train-on-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FV-H3eoPUnA8) | 
[![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FDeci-AI\u002Fsuper-gradients\u002Fblob\u002Fmaster\u002FYOLONAS.md) |\n| [Segment Images with Segment Anything Model (SAM)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-sam.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-sam.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-segment-anything-with-sam.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-use-segment-anything-model-sam) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FD-D6ZmadzPE) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2304.02643-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2304.02643)|\n| [Zero-Shot Object Detection with Grounding DINO](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-grounding-dino.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-grounding-dino.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fzero-shot-object-detection-with-grounding-dino.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fgrounding-dino-zero-shot-object-detection) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FcMa77r3YrDk) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FIDEA-Research\u002FGroundingDINO) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2303.05499-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2303.05499)|\n| [Fine-Tune DETR Transformer on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-huggingface-detr-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-huggingface-detr-on-custom-dataset.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-huggingface-detr-on-custom-dataset.ipynb)  | [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FAM8D4j9KoaU) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fdetr) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2005.12872-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2005.12872)|\n| [Classify Images with DINOv2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-classification.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-classification.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-classification.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-classify-images-with-dinov2\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fdinov2\u002F) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2304.07193-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2304.07193)|\n| [Fine-Tune YOLOv8 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-object-detection-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-object-detection-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-object-detection-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov8-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FwuZtUMEiKWY) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Pose Estimation 
Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-keypoint.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-keypoint.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-keypoint.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-a-custom-yolov8-pose-estimation-model\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Oriented Bounding Boxes (OBB) Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-obb.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-obb.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-obb.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov8-obb-model\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-instance-segmentation-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-instance-segmentation-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-instance-segmentation-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov8-instance-segmentation\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FpFiGSrRtaU4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv8 on Classification 
Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-classification-on-custom-dataset.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-classification-on-custom-dataset.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov8-classification-on-custom-dataset.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-a-yolov8-classification-model\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |\n| [Fine-Tune YOLOv7 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov7-custom-dataset-training-tutorial) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=5nsmXLyDaU4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002Fyolov7) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2207.02696-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2207.02696)|\n| [Fine-Tune YOLOv7 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-instance-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-instance-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-instance-segmentation-on-custom-data.ipynb)  | 
[![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov7-instance-segmentation-on-custom-data) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=vFGxM2KLs10) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002Fyolov7) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2207.02696-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2207.02696)|\n| [Fine-Tune MT-YOLOv6 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov6-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=fFCWrMFH2UY) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fmeituan\u002FYOLOv6) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2209.02976-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2209.02976)|\n| [Fine-Tune YOLOv5 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov5-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=x0ThXHbtqCQ) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fyolov5) |\n| [Fine-Tune YOLOv5 on Classification 
Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-classification-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-classification-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-classification-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov5-classification-custom-data) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=DPjp9Kq4qn8) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fyolov5) |\n| [Fine-Tune YOLOv5 on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-instance-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-instance-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov5-instance-segmentation-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolov5-instance-segmentation-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=vKzfvtEtiYo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fyolov5) |\n| [Fine-Tune Faster RCNN on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-detectron2-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-detectron2-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-detectron2-segmentation-on-custom-data.ipynb)  | 
[![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-detectron2) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002Fe8LPflX0nwQ) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fdetectron2) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-1703.06870v3-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F1703.06870v3)|\n| [Fine-Tune SegFormer on Instance Segmentation Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segformer-segmentation-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segformer-segmentation-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segformer-segmentation-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-segformer-on-a-custom-dataset-with-pytorch-lightning) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=4HNkBMfw-2o) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FNVlabs\u002FSegFormer) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2105.15203v3-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2105.15203v3)|\n| [Fine-Tune ViT on Classification Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-vision-transformer-classification-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-vision-transformer-classification-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-vision-transformer-classification-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-vision-transformer) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=8yRE2Pa-8_I) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Flucidrains\u002Fvit-pytorch) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2010.11929-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2010.11929)|\n| 
[Fine-Tune Scaled-YOLOv4 on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-scaled-yolov4-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-scaled-yolov4-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-scaled-yolov4-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-scaled-yolov4) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=rEbpKxZbvIo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002FScaledYOLOv4) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2011.08036-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2011.08036)|\n| [Fine-Tune YOLOS on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolos-huggingface-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolos-huggingface-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolos-huggingface-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolos-transformer-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=N0V0xxSi6Xc) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fhuggingface\u002Ftransformers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2106.00666-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2106.00666)|\n| [Fine-Tune YOLOR on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolor-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolor-object-detection-on-custom-data.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolor-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-yolor-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=sZ5DiXDOHEM) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fyolor) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2105.04206-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2105.04206)|\n| [Fine-Tune YOLOX on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolox-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolox-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolox-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolox-on-a-custom-dataset) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=q3RbFbaQQGw) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FMegvii-BaseDetection\u002FYOLOX) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2107.08430-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2107.08430)|\n| [Fine-Tune ResNet34 on Classification Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-resnet34-classification.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-resnet34-classification.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-resnet34-classification.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-a-custom-resnet34-model) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=93kXzUOiYY4) |  |\n| [Image Classification with OpenAI 
Clip](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-openai-clip-classification.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-openai-clip-classification.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-openai-clip-classification.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-use-openai-clip) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=8o701AEoZ8I) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fopenai\u002FCLIP) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2103.00020-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2103.00020)|\n| [Fine-Tune YOLOv4-tiny Darknet on Object Detection Dataset](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov4-tiny-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov4-tiny-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov4-tiny-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.ai\u002Ftrain-yolov4-tiny-on-custom-data-lighting-fast-detection) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=NTnZgLsk_DA) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fdarknet) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2011.04244-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2011.04244)|\n| [Train a YOLOv8 Classification Model with No Labeling](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-train-yolov8-classification-no-labeling.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-train-yolov8-classification-no-labeling.ipynb) 
[![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-train-yolov8-classification-no-labeling.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-classification-model-no-labeling\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fultralytics\u002Fultralytics) |
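Nearly every fine-tuning notebook in the table above follows the same two-step recipe: export a labeled dataset from Roboflow in the target model's native format, then launch training through that framework's Python API. Below is a minimal sketch of the flow for the YOLOv8 rows, assuming the `roboflow` and `ultralytics` packages are installed; the API key, workspace, project, and image names are placeholders, not values taken from any particular notebook.

```python
# Minimal YOLOv8 fine-tuning sketch (placeholder names throughout).
from roboflow import Roboflow
from ultralytics import YOLO

# 1. Export a dataset from Roboflow in YOLOv8 format.
rf = Roboflow(api_key="YOUR_API_KEY")  # placeholder key
project = rf.workspace("your-workspace").project("your-project")
dataset = project.version(1).download("yolov8")

# 2. Fine-tune from pretrained COCO weights on the exported dataset.
model = YOLO("yolov8n.pt")
model.train(data=f"{dataset.location}/data.yaml", epochs=100, imgsz=640)

# 3. Run inference with the best checkpoint saved during training.
best = YOLO("runs/detect/train/weights/best.pt")
results = best.predict("example.jpg", conf=0.25)
```

The classification, instance segmentation, pose, and OBB variants differ mainly in the export format string and the starting weights (for example `yolov8n-seg.pt` or `yolov8n-pose.pt`); consult the individual notebooks for the exact settings each tutorial uses.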
\n\n## 📍 Tracker Tutorials (3 Notebooks)\n| **Notebook** | **Open in Colab \u002F Kaggle \u002F SageMaker Studio Lab** | **Complementary Materials** | **Repository \u002F Paper** |\n|:------------:|:-------------------------------------------------:|:---------------------------:|:----------------------:|\n| [How to Track Objects with RF-DETR and the ByteTrack Tracker](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-bytetrack-tracker.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=u0k2dTZ0vfs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Ftrackers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2110.06864-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.06864)|\n| [How to Track Objects with RF-DETR and the SORT Tracker](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-sort-tracker.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-sort-tracker.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-sort-tracker.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=u0k2dTZ0vfs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Ftrackers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-1602.00763-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F1602.00763)|\n| [How to Track Objects with RF-DETR and the OC-SORT Tracker](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-ocsort-tracker.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-ocsort-tracker.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-objects-with-ocsort-tracker.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=u0k2dTZ0vfs) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Ftrackers) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2206.14360-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2206.14360)|
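The three tracker notebooks share one loop: run a detector on each frame, hand the detections to a tracker that assigns persistent IDs, and draw the result. The rough sketch below illustrates that loop, not the notebooks' exact code: YOLOv8 stands in for RF-DETR, and `supervision`'s built-in ByteTrack stands in for the roboflow\u002Ftrackers implementations; the video paths are placeholders.

```python
# Illustrative detect -> track -> annotate loop; YOLOv8 and supervision's
# ByteTrack stand in for RF-DETR and the roboflow/trackers implementations.
import supervision as sv
from ultralytics import YOLO

model = YOLO("yolov8n.pt")   # stand-in detector
tracker = sv.ByteTrack()     # ByteTrack (arXiv 2110.06864)
boxes = sv.BoxAnnotator()
labels = sv.LabelAnnotator()

def callback(frame, index):
    # Detect objects in the current frame.
    detections = sv.Detections.from_ultralytics(model(frame, verbose=False)[0])
    # Associate detections across frames; each object keeps a stable tracker_id.
    detections = tracker.update_with_detections(detections)
    tags = [f"#{tracker_id}" for tracker_id in detections.tracker_id]
    annotated = boxes.annotate(scene=frame.copy(), detections=detections)
    return labels.annotate(scene=annotated, detections=detections, labels=tags)

# "input.mp4" / "output.mp4" are placeholder paths.
sv.process_video(source_path="input.mp4", target_path="output.mp4", callback=callback)
```

Swapping in SORT or OC-SORT changes only which tracker object is constructed; the surrounding detect-and-annotate loop stays the same.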
\n\n## 🛠️ Computer Vision Skills (23 Notebooks)\n| **Notebook** | **Open in Colab \u002F Kaggle \u002F SageMaker Studio Lab** | **Complementary Materials** | **Repository \u002F Paper** |\n|:------------:|:-------------------------------------------------:|:---------------------------:|:----------------------:|\n| [Basketball AI: Detecting NBA 3-Second Violations](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-automatic-detection-of-3-second-violations.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-automatic-detection-of-3-second-violations.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-automatic-detection-of-3-second-violations.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fdetect-3-second-violation-ai-basketball)  |  |\n| [Basketball AI: How to Detect, Track, and Identify Basketball Players](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-how-to-detect-track-and-identify-basketball-players.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-how-to-detect-track-and-identify-basketball-players.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fbasketball-ai-how-to-detect-track-and-identify-basketball-players.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fidentify-basketball-players) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=yGQb9KkvQ1Q) |  |\n| [Football AI](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffootball-ai.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffootball-ai.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ffootball-ai.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fcamera-calibration-sports-computer-vision\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FaBVGKoNZQUw) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsports) |\n| [Auto-Label a Dataset with GroundedSAM 2](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fgrounded-sam-2-auto-label.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fgrounded-sam-2-auto-label.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fgrounded-sam-2-auto-label.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fwhat-is-segment-anything-2)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Ffacebookresearch\u002Fsegment-anything-2) |\n| [Run YOLOv7 Object Detection with OpenVINO + TorchORT](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Faccelerate-pytorch-openvino-torch-ort)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fyolov7) 
[![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2207.02696-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2207.02696)|\n| [Estimate Vehicle Speed with YOLOv8](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-estimate-vehicle-speed-with-computer-vision.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-estimate-vehicle-speed-with-computer-vision.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-estimate-vehicle-speed-with-computer-vision.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Festimate-speed-computer-vision\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FuWP6UjDeZvY) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsupervision\u002Ftree\u002Fdevelop\u002Fexamples\u002Fspeed_estimation) |\n| [Detect and Count Objects in Polygon Zone with YOLOv5 \u002F YOLOv8 \u002F Detectron2 + Supervision](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-detect-and-count-objects-in-polygon-zone.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-detect-and-count-objects-in-polygon-zone.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-detect-and-count-objects-in-polygon-zone.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-count-objects-in-a-zone) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002Fl_kf9CfZ_8M) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsupervision) |\n| [Track and Count Vehicles with YOLOv8 + ByteTRACK + Supervision](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-and-count-vehicles-with-yolov8.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-and-count-vehicles-with-yolov8.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-and-count-vehicles-with-yolov8.ipynb)  | 
[![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov8-tracking-and-counting\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FOS5qI9YBkfk) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fsupervision) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2110.06864-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.06864)|\n| [Track Football Players with YOLOv5 + ByteTRACK](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-football-players.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-football-players.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-track-football-players.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrack-football-players) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FQCG8QMhga9k) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fifzhang\u002FByteTrack) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2110.06864-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.06864)|\n| [Auto-Train a YOLOv8 Model with Autodistill](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-auto-train-yolov8-model-with-autodistill.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-auto-train-yolov8-model-with-autodistill.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-auto-train-yolov8-model-with-autodistill.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fautodistill) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FgKTYMfwPo4M) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fautodistill\u002Fautodistill) |\n| [Image Embeddings Analysis - Part 1](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage_embeddings_analysis_part_1.ipynb) | 
[![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage_embeddings_analysis_part_1.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage_embeddings_analysis_part_1.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FYxJkE6FvGF4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fopenai\u002FCLIP) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2103.00020-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2103.00020)|\n| [Automated Dataset Annotation and Evaluation with Grounding DINO and SAM](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino-and-sam.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino-and-sam.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino-and-sam.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fenhance-image-annotation-with-grounding-dino-and-sam\u002F) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FoEQYStnF2l8) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FIDEA-Research\u002FGroundingDINO) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2303.05499-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2303.05499)|\n| [Automated Dataset Annotation and Evaluation with Grounding DINO](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fautomated-dataset-annotation-and-evaluation-with-grounding-dino.ipynb)  |  [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FC4NqaRBz_Kw) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FIDEA-Research\u002FGroundingDINO) 
[![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2303.05499-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2303.05499)|\n| [Roboflow Video Inference with Custom Annotators](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Froboflow_video_inference_with_custom_annotators.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Froboflow_video_inference_with_custom_annotators.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Froboflow_video_inference_with_custom_annotators.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fcustom-annotator-video-inference)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow\u002Finference) |\n| [DINO-GPT-4V Object Detection](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdino-gpt4v-autodistill.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdino-gpt4v-autodistill.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdino-gpt4v-autodistill.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fdino-gpt-4v\u002F)  |  |\n| [Train a Segmentation Model with No Labeling](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segmentation-model-with-no-labeling.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segmentation-model-with-no-labeling.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-segmentation-model-with-no-labeling.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Ftrain-a-segmentation-model-no-labeling\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fautodistill\u002Fautodistill) |\n| [DINOv2 Image Retrieval](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fdinov2-image-retrieval.ipynb) | 
\n| [Vector Analysis with Scikit-learn and Bokeh](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fvector-analysis-with-sklearn-and-bokeh.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fvector-analysis-with-sklearn-and-bokeh.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fvector-analysis-with-sklearn-and-bokeh.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fvector-analysis)  |  |\n
| [RF100 Object Detection Model Benchmarking](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-rf100.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-rf100.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-rf100.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Froboflow-100) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fyoutu.be\u002FjIgZMr-PBMo) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Froboflow-100-benchmark) [![arXiv](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FarXiv-2211.13523-b31b1b.svg)](https:\u002F\u002Farxiv.org\u002Fabs\u002F2211.13523)|\n
| [Create Segmentation Masks with Roboflow](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-generate-segmentation-mask-with-roboflow.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-generate-segmentation-mask-with-roboflow.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-generate-segmentation-mask-with-roboflow.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-create-segmentation-masks-with-roboflow)  |  |
\n| [How to Use PolygonZone and Roboflow Supervision](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-polygonzone-annotate-and-supervision.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-polygonzone-annotate-and-supervision.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fhow-to-use-polygonzone-annotate-and-supervision.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fpolygonzone\u002F)  |  |\n
| [Train a Package Detector with Two Labeled Images](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-package-detector-two-labeled-images.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-package-detector-two-labeled-images.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-package-detector-two-labeled-images.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fpackage-detector\u002F)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Fautodistill\u002Fautodistill-seggpt) |\n
| [Image-to-Image Search with CLIP and faiss](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage-to-image-search-clip-faiss.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage-to-image-search-clip-faiss.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fimage-to-image-search-clip-faiss.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fclip-image-search-faiss\u002F)  |  |\n
\u003C!--- AUTOGENERATED-NOTEBOOKS-TABLE -->\n
\n## 🎬 Videos\n\nAlmost every week we create tutorials showcasing the hottest models in computer vision. 🔥\n[Subscribe](https:\u002F\u002Fwww.youtube.com\u002F@Roboflow) to stay up to date with our latest YouTube videos!\n\n
\u003Cp align=\"left\">\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FCilXrt3S-ws\" title=\"How to Choose the Best Computer Vision Model for Your Project\">\u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_20ae1a707d43.png\" alt=\"How to Choose the Best Computer Vision Model for Your Project\" width=\"300px\" align=\"left\" \u002F>\u003C\u002Fa>\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FCilXrt3S-ws\" title=\"How to Choose the Best Computer Vision Model for Your Project\">\u003Cstrong>How to Choose the Best Computer Vision Model for Your Project\u003C\u002Fstrong>\u003C\u002Fa>\n\u003Cdiv>\u003Cstrong>Created: May 26, 2023\u003C\u002Fstrong> | \u003Cstrong>Updated: May 26, 2023\u003C\u002Fstrong>\u003C\u002Fdiv>\n\u003Cbr\u002F> In this video, we dive into the complexities of choosing the right computer vision model for your unique project. From the importance of high-quality datasets to hardware considerations, interoperability, benchmarking, and licensing, this video covers it all... \u003C\u002Fp> \u003Cbr\u002F>\n\n
\u003Cp align=\"left\">\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FoEQYStnF2l8\" title=\"Accelerate Image Annotation with SAM and Grounding DINO\">\u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_c1f71e4546cf.png\" alt=\"Accelerate Image Annotation with SAM and Grounding DINO\" width=\"300px\" align=\"left\" \u002F>\u003C\u002Fa>\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FoEQYStnF2l8\" title=\"Accelerate Image Annotation with SAM and Grounding DINO\">\u003Cstrong>Accelerate Image Annotation with SAM and Grounding DINO\u003C\u002Fstrong>\u003C\u002Fa>\n\u003Cdiv>\u003Cstrong>Created: April 20, 2023\u003C\u002Fstrong> | \u003Cstrong>Updated: April 20, 2023\u003C\u002Fstrong>\u003C\u002Fdiv>\n\u003Cbr\u002F> Learn how to speed up your image annotation process with Grounding DINO and the Segment Anything Model (SAM). See how to convert object detection datasets into instance segmentation datasets, and explore the potential of using these models to automatically annotate datasets for real-time detectors like YOLOv8... \u003C\u002Fp> \u003Cbr\u002F>\n
\u003Cp align=\"left\">\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FD-D6ZmadzPE\" title=\"SAM - Segment Anything Model by Meta AI: Complete Guide\">\u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Froboflow_notebooks_readme_edd70f5e3ac5.png\" alt=\"SAM - Segment Anything Model by Meta AI: Complete Guide\" width=\"300px\" align=\"left\" \u002F>\u003C\u002Fa>\n\u003Ca href=\"https:\u002F\u002Fyoutu.be\u002FD-D6ZmadzPE\" title=\"SAM - Segment Anything Model by Meta AI: Complete Guide\">\u003Cstrong>SAM - Segment Anything Model by Meta AI: Complete Guide\u003C\u002Fstrong>\u003C\u002Fa>\n\u003Cdiv>\u003Cstrong>Created: April 11, 2023\u003C\u002Fstrong> | \u003Cstrong>Updated: April 11, 2023\u003C\u002Fstrong>\u003C\u002Fdiv>\n\n\u003Cbr\u002F> Discover the incredible potential of Meta AI's Segment Anything Model (SAM)! We take a deep dive into SAM, an efficient and promptable image segmentation model that is transforming computer vision tasks. With over 1 billion masks generated on 11 million licensed, privacy-respecting images, SAM's zero-shot performance is often superior to prior fully supervised results... \u003C\u002Fp>\n\n
## 💻 Run Locally\n\nWe try to make it as easy as possible to run Roboflow Notebooks in Colab and Kaggle, but if you still want to run them locally,\nthe steps are below. Remember not to install dependencies globally; use a\n[venv](https:\u002F\u002Fpackaging.python.org\u002Fen\u002Flatest\u002Fguides\u002Finstalling-using-pip-and-virtual-environments\u002F).\n\n```console\n# clone the repository and navigate to the root directory\ngit clone git@github.com:roboflow-ai\u002Fnotebooks.git\ncd notebooks\n\n# set up the Python environment and activate it\npython3 -m venv venv\nsource venv\u002Fbin\u002Factivate\n\n# install and run Jupyter Notebook\npip install notebook\njupyter notebook\n```\n
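\nIf you would rather execute a notebook end-to-end from a script than through the Jupyter UI, nbconvert's Python API can run it programmatically. A minimal sketch, assuming \u0060nbformat\u0060 and \u0060nbconvert\u0060 are installed in the active venv and using \u0060how-to-use-rf100.ipynb\u0060 purely as an example path:\n\n```python\nimport nbformat\nfrom nbconvert.preprocessors import ExecutePreprocessor\n\n# Load the notebook, execute every cell in order, and save the executed copy.\n# The notebook path is just an example from this repository.\nnb = nbformat.read(\"notebooks\u002Fhow-to-use-rf100.ipynb\", as_version=4)\nep = ExecutePreprocessor(timeout=1200, kernel_name=\"python3\")\nep.preprocess(nb, {\"metadata\": {\"path\": \"notebooks\u002F\"}})\n\nwith open(\"executed.ipynb\", \"w\", encoding=\"utf-8\") as f:\n    nbformat.write(nb, f)\n```\n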
\n## ☁️ Run in SageMaker Studio Lab\n\nYou can now open our tutorial notebooks in [Amazon SageMaker Studio Lab](https:\u002F\u002Faws.amazon.com\u002Fsagemaker\u002Fstudio-lab\u002F),\na free machine learning development environment that provides compute, storage, and security, at no cost, for anyone to learn and experiment with ML.\n\n
| Stable Diffusion Image Generation | YOLOv5 Custom Dataset Training | YOLOv7 Custom Dataset Training |\n|:---------------------------------:|:------------------------------:|:------------------------------:|\n
|  [![SageMaker](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Fsage-maker.svg)](https:\u002F\u002Fstudiolab.sagemaker.aws\u002Fimport\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fsagemaker-studiolab\u002Fstable-diffusion-image-generation.ipynb) | [![SageMaker](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Fsage-maker.svg)](https:\u002F\u002Fstudiolab.sagemaker.aws\u002Fimport\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fsagemaker-studiolab\u002Fyolov5-custom-training.ipynb) | [![SageMaker](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Fsage-maker.svg)](https:\u002F\u002Fstudiolab.sagemaker.aws\u002Fimport\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Fsagemaker-studiolab\u002Fyolov7-custom-training.ipynb) |\n\n\n
## 🐞 Bugs & 🦸 Contribution\n\nComputer vision moves fast! Sometimes our notebooks lag a little behind the ever-advancing library versions. If you find a notebook that is not working, open a\n[bug report](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fissues\u002Fnew?assignees=&labels=bug%2Ctriage&template=bug-report.yml)\nand let us know.\n\nIf you have an idea for a new tutorial we should create, open a\n[feature request](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fissues\u002Fnew?assignees=&labels=enhancement&template=feature-request.yml).\nWe are always on the lookout for new ideas. If you feel up to creating a tutorial yourself, take a look at our\n[contribution guide](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002FCONTRIBUTING.md). You will find all the information you need there.\n\nWe are here for you, so do not hesitate to [reach out to us](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fdiscussions).","# Roboflow Notebooks Quick Start Guide\n\nRoboflow Notebooks is a growing collection of computer vision tutorials covering tasks from object detection, image segmentation, and pose estimation to data extraction and OCR. The repository provides hands-on code for state-of-the-art (SOTA) models such as YOLOv11, SAM 2\u002F3, Florence-2, and Qwen2.5-VL.\n\n
## Environment Setup\n\nThe tutorials ship as Jupyter Notebook (\u0060.ipynb\u0060) files, so you can start learning without any complicated local configuration.\n\n### System Requirements\n- **Operating system**: Windows, macOS, or Linux\n- **Browser**: a modern browser such as Chrome, Firefox, or Edge\n- **Accounts**: a Google account (for Colab) or a Kaggle account is recommended\n\n### Prerequisites\nMost tutorials are designed to run directly in a cloud GPU environment, and dependencies are installed automatically by the first code cell of each notebook. If you prefer to run locally, make sure you have:\n- Python 3.8+\n- Jupyter Lab \u002F Jupyter Notebook\n- Git\n\n
## Installation and Usage\n\nWe recommend running on a cloud platform to get free GPU resources and avoid local environment issues.\n\n### Method 1: Google Colab (recommended)\n\nThis is the simplest option and requires no local installation.\n\n1. Visit the [Roboflow Notebooks GitHub repository](https:\u002F\u002Fgithub.com\u002Froboflow\u002Fnotebooks).\n2. Find the tutorial you are interested in within the \"Model Tutorials\" table (for example, *Fine-Tune YOLO26 on Object Detection Dataset*).\n3. Click the **Colab** badge on that row.\n4. The page opens in Google Colab; click **Connect** in the top menu bar.\n5. To use a GPU, go to **Runtime** > **Change runtime type** and set the hardware accelerator to **GPU**.\n6. Execute the code cells in order from top to bottom.\n\n### Method 2: Kaggle Kernels\n\nIf Colab is not available to you, Kaggle is an excellent alternative.\n\n1. Click the **Kaggle** badge in the tutorial table.\n2. After signing in to your Kaggle account, the code loads into a Kaggle Notebook environment.\n3. Open **Settings** in the right-hand panel and set **Accelerator** to **GPU T4 x2** (within the free quota).\n4. Click **Run All** or run the cells one by one.\n\nWhichever platform you pick, it is worth confirming that the GPU runtime is actually active before training; a quick check is sketched below.
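\n\nThis is a minimal sanity check, assuming \u0060torch\u0060 is preinstalled in the Colab\u002FKaggle image (it normally is); run it in the first cell:\n\n```python\nimport torch\n\n# True means the runtime sees a CUDA GPU; otherwise revisit the accelerator setting\nprint(torch.cuda.is_available())\nif torch.cuda.is_available():\n    print(torch.cuda.get_device_name(0))  # e.g. \"Tesla T4\"\n```\n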
\n### Method 3: Local Clone (advanced users)\n\nIf you need to debug locally or work offline:\n\n```bash\n# 1. Clone the repository\ngit clone https:\u002F\u002Fgithub.com\u002Froboflow\u002Fnotebooks.git\ncd notebooks\n\n# 2. Create a virtual environment (optional but recommended)\npython -m venv venv\nsource venv\u002Fbin\u002Factivate  # On Windows use: venv\\Scripts\\activate\n\n# 3. Install the base dependencies\npip install jupyterlab notebook\n\n# 4. Launch Jupyter\njupyter lab\n```\n*Note: for model-specific dependencies (such as \u0060ultralytics\u0060, \u0060torch\u0060, \u0060transformers\u0060), open the \u0060.ipynb\u0060 file you want and run the install command in its first cell.*\n\n
## Basic Usage Example\n\nThe walkthrough below uses \"Fine-Tune YOLO26 on an Object Detection Dataset\" to demonstrate the basic workflow.\n\n### 1. Open the Tutorial\nOpen [Fine-Tune YOLO26 on Object Detection Dataset](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolo26-object-detection-on-custom-dataset.ipynb) in Colab.\n\n### 2. Install Dependencies\nRun the first code block to install the required libraries automatically:\n```python\n!pip install ultralytics roboflow\n```\n\n### 3. Load the Dataset\nLoad the dataset with the Roboflow Python SDK (replace the API key and workspace details with your own):\n```python\nfrom roboflow import Roboflow\n\nrf = Roboflow(api_key=\"YOUR_API_KEY\")\nproject = rf.workspace(\"your-workspace\").project(\"your-project\")\ndataset = project.version(1).download(\"yolov8\")\n```\n\n### 4. Train the Model\nStart training with the \u0060ultralytics\u0060 library:\n```python\nfrom ultralytics import YOLO\n\n# Load a pretrained checkpoint (the notebook itself targets YOLO26;\n# yolo11n.pt is shown here as a readily available stand-in)\nmodel = YOLO(\"yolo11n.pt\")\n\n# Start training\nresults = model.train(\n    data=f\"{dataset.location}\u002Fdata.yaml\",\n    epochs=50,\n    imgsz=640\n)\n```\n\n### 5. Test Inference\nOnce training finishes, run predictions directly with the model:\n```python\n# Run inference on an image\nresults = model.predict(source=\"path\u002Fto\u002Fimage.jpg\", show=True)\n```\n
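\n### 6. (Optional) Inspect the Predictions\n\nIf you want to consume the detections programmatically rather than just display them, the \u0060Results\u0060 objects returned by \u0060predict\u0060 expose the boxes, confidences, and class ids. A minimal sketch that continues from the \u0060model\u0060 and \u0060results\u0060 defined above:\n```python\n# Print one line per detected box: class name, confidence, and pixel coordinates\nfor r in results:\n    for box in r.boxes:\n        cls_id = int(box.cls[0])\n        conf = float(box.conf[0])\n        x1, y1, x2, y2 = box.xyxy[0].tolist()\n        print(f\"{model.names[cls_id]}: {conf:.2f} @ ({x1:.0f}, {y1:.0f}, {x2:.0f}, {y2:.0f})\")\n```\n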
PaliGemma)","torch","supervision",[15,14,35],[104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123],"computer-vision","deep-learning","deep-neural-networks","image-classification","image-segmentation","object-detection","yolov5","pytorch","tutorial","yolov8","google-colab","machine-learning","zero-shot-classification","zero-shot-detection","open-vocabulary-detection","automatic-labeling-system","open-vocabulary-segmentation","paligemma","qwen","vlm","2026-03-27T02:49:30.150509","2026-04-08T01:57:15.531504",[127,132,137,142,147,151],{"id":128,"question_zh":129,"answer_zh":130,"source_url":131},23439,"训练大型数据集时遇到 numpy 内存分配错误（_ArrayMemoryError）怎么办？","该问题通常是因为一次性加载所有图像导致内存溢出。解决方案是升级到最新版本的 autodistill，新版本支持最新的 supervision 库并引入了“懒加载”（lazy data loading）功能，可以避免一次性将所有数据载入内存。如果问题依旧，可能需要检查 dataset_detection.py 中的相对路径与绝对路径配置是否正确。","https:\u002F\u002Fgithub.com\u002Froboflow\u002Fnotebooks\u002Fissues\u002F336",{"id":133,"question_zh":134,"answer_zh":135,"source_url":136},23440,"运行 YOLOv8 自定义训练脚本时提示 'dataset not found'（未找到数据集）错误如何解决？","此错误通常是由于配置文件路径不正确导致的。请检查你的项目目录下是否存在 data.yaml 文件，并确认代码中引用的路径与实际文件路径完全一致（例如确认是否在 D:\\dataet\\data.yaml）。此外，确保你使用的是官方提供的最新 Notebook 模板，因为旧版本可能存在路径硬编码问题。","https:\u002F\u002Fgithub.com\u002Froboflow\u002Fnotebooks\u002Fissues\u002F41",{"id":138,"question_zh":139,"answer_zh":140,"source_url":141},23441,"使用自定义 YOLOv12 模型进行推理时出现 AttributeError 错误怎么办？","这通常是因为使用了不匹配的权重文件或代码实现有小缺陷。尝试以下两种方法：1. 检查权重版本，不要使用 \"Turbo\" 版本的权重，请改用 \"v1.0\" 版本的权重进行训练和推理。2. 如果是代码问题，前往 ultralytics\u002Fnn\u002Fmodules\u002Fblock.py 文件，在 Attn 类中添加一行代码：self.qkv = self.qk，以修复属性缺失问题。","https:\u002F\u002Fgithub.com\u002Froboflow\u002Fnotebooks\u002Fissues\u002F370",{"id":143,"question_zh":144,"answer_zh":145,"source_url":146},23442,"在使用 HuggingFace YOLOS 模型训练时遇到关于 'max_size' 参数废弃的 ValueError 错误如何修复？","这是因为 transformers 库的版本更新导致参数名称变更。错误提示指出 'max_size' 已被废弃。你需要修改图像处理相关的配置，将 size 参数的指定方式从旧的格式改为使用字典格式，明确指定 'longest_edge' 或 'shortest_edge'。例如，将 size 设置为 {'longest_edge': 值} 而不是直接传递元组或废弃的参数名。建议查看 Roboflow 文档中 YOLOS 模型部分的最新更新以获取修正后的代码片段。","https:\u002F\u002Fgithub.com\u002Froboflow\u002Fnotebooks\u002Fissues\u002F22",{"id":148,"question_zh":149,"answer_zh":150,"source_url":131},23443,"如何处理 supervision 库中传入 DetectionDataset 的数据格式弃用警告？","当你看到警告 \"Passing a `Dict[str, np.ndarray]` into `DetectionDataset` is deprecated\" 时，说明你正在使用旧的数据传入方式。解决方法是将输入数据格式从字典形式的 numpy 数组（Dict[str, np.ndarray]）更改为文件路径列表（List[str]）。你需要修改数据加载逻辑，直接传递图像文件的路径列表给 DetectionDataset，而不是预先加载好的数组。",{"id":152,"question_zh":153,"answer_zh":154,"source_url":136},23444,"在 Colab 或 Kaggle 上运行 Roboflow Notebook 时遇到环境依赖或路径问题该怎么办？","首先确保你使用的是仓库中最新的 Notebook 文件，因为旧版本可能包含过时的路径配置或依赖项。如果在自定义环境中运行，请检查 data.yaml 等配置文件中的路径是否为绝对路径或与当前运行环境匹配的相对路径。对于特定的库版本冲突（如 supervision 或 ultralytics），尝试在 notebook 开头强制安装指定版本，例如使用 !pip install -U roboflow autodistill supervision 来更新到支持懒加载和新 API 的版本。",[156],{"id":157,"version":158,"summary_zh":159,"released_at":160},144957,"1.0.0","# 1.0.0 - 2022-12-01\n\n[Roboflow Notebooks](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks) 的首次发布 💜！\n\n## 🚀 新增内容\n\n| **笔记本** | **在 Colab \u002F Kaggle \u002F SageMaker Studio Lab 中打开** | **补充材料** | **仓库** |\n|:------------:|:------------------------------------:|:---------------------------:|:--------------:|\n| [YOLOv7 PyTorch 目标检测](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb) | 
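 A minimal sketch of the new call, assuming a recent supervision release; the class name, paths, and empty annotations below are placeholders rather than code from the original issue:\n\n```python\nimport supervision as sv\n\n# New style: pass image file paths so images can be loaded lazily,\n# instead of a dict of eagerly loaded numpy arrays.\nimage_paths = [\"images\u002F0001.jpg\", \"images\u002F0002.jpg\"]  # placeholder paths\n\ndataset = sv.DetectionDataset(\n    classes=[\"package\"],  # placeholder class list\n    images=image_paths,\n    annotations={path: sv.Detections.empty() for path in image_paths},\n)\n```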
",{"id":152,"question_zh":153,"answer_zh":154,"source_url":136},23444,"What should I do about environment dependency or path problems when running a Roboflow Notebook on Colab or Kaggle?","First make sure you are using the latest notebook file from the repository, since older versions may contain outdated path configuration or dependencies. If you run in a custom environment, check that the paths in configuration files such as data.yaml are absolute, or relative paths that match the current runtime. For specific library version conflicts (such as supervision or ultralytics), try force-installing pinned versions at the top of the notebook, for example !pip install -U roboflow autodistill supervision, to get versions that support lazy loading and the new APIs.",[156],{"id":157,"version":158,"summary_zh":159,"released_at":160},144957,"1.0.0","# 1.0.0 - 2022-12-01\n\nThe first release of [Roboflow Notebooks](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks) 💜!\n\n## 🚀 What's New\n\n| **notebook** | **open in Colab \u002F Kaggle \u002F SageMaker Studio Lab** | **complementary materials** | **repository** |\n|:------------:|:------------------------------------:|:---------------------------:|:--------------:|\n
| [YOLOv7 PyTorch Object Detection](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fyolov7-custom-dataset-training-tutorial) [![YouTube](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fyoutube.svg)](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=5nsmXLyDaU4) | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002FWongKinYiu\u002Fyolov7) |\n
| [YOLOv7 PyTorch Object Detection OpenVINO + TorchORT](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov7-object-detection-on-custom-data-openvino-torch-ort.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Faccelerate-pytorch-openvino-torch-ort)  | [![GitHub](https:\u002F\u002Fbadges.aleen42.com\u002Fsrc\u002Fgithub.svg)](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fyolov7) |\n
| [MT-YOLOv6 PyTorch Object Detection](https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb) | [![Colab](https:\u002F\u002Fcolab.research.google.com\u002Fassets\u002Fcolab-badge.svg)](https:\u002F\u002Fcolab.research.google.com\u002Fgithub\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb) [![Kaggle](https:\u002F\u002Fkaggle.com\u002Fstatic\u002Fimages\u002Fopen-in-kaggle.svg)](https:\u002F\u002Fkaggle.com\u002Fkernels\u002Fwelcome?src=https:\u002F\u002Fgithub.com\u002Froboflow-ai\u002Fnotebooks\u002Fblob\u002Fmain\u002Fnotebooks\u002Ftrain-yolov6-object-detection-on-custom-data.ipynb)  | [![Roboflow](https:\u002F\u002Fraw.githubusercontent.com\u002Froboflow-ai\u002Fnotebooks\u002Fmain\u002Fassets\u002Fbadges\u002Froboflow-blogpost.svg)](https:\u002F\u002Fblog.roboflow.com\u002Fhow-to-train-yolov6-on-a-custom-dataset) [![YouTub","2022-12-01T22:43:27"]