[{"data":1,"prerenderedAt":-1},["ShallowReactive",2],{"similar-evinjohnn--natively-cluely-ai-assistant":3,"tool-evinjohnn--natively-cluely-ai-assistant":64},[4,17,27,35,43,56],{"id":5,"name":6,"github_repo":7,"description_zh":8,"stars":9,"difficulty_score":10,"last_commit_at":11,"category_tags":12,"status":16},3808,"stable-diffusion-webui","AUTOMATIC1111\u002Fstable-diffusion-webui","stable-diffusion-webui 是一个基于 Gradio 构建的网页版操作界面，旨在让用户能够轻松地在本地运行和使用强大的 Stable Diffusion 图像生成模型。它解决了原始模型依赖命令行、操作门槛高且功能分散的痛点，将复杂的 AI 绘图流程整合进一个直观易用的图形化平台。\n\n无论是希望快速上手的普通创作者、需要精细控制画面细节的设计师，还是想要深入探索模型潜力的开发者与研究人员，都能从中获益。其核心亮点在于极高的功能丰富度：不仅支持文生图、图生图、局部重绘（Inpainting）和外绘（Outpainting）等基础模式，还独创了注意力机制调整、提示词矩阵、负向提示词以及“高清修复”等高级功能。此外，它内置了 GFPGAN 和 CodeFormer 等人脸修复工具，支持多种神经网络放大算法，并允许用户通过插件系统无限扩展能力。即使是显存有限的设备，stable-diffusion-webui 也提供了相应的优化选项，让高质量的 AI 艺术创作变得触手可及。",162132,3,"2026-04-05T11:01:52",[13,14,15],"开发框架","图像","Agent","ready",{"id":18,"name":19,"github_repo":20,"description_zh":21,"stars":22,"difficulty_score":23,"last_commit_at":24,"category_tags":25,"status":16},1381,"everything-claude-code","affaan-m\u002Feverything-claude-code","everything-claude-code 是一套专为 AI 编程助手（如 Claude Code、Codex、Cursor 等）打造的高性能优化系统。它不仅仅是一组配置文件，而是一个经过长期实战打磨的完整框架，旨在解决 AI 代理在实际开发中面临的效率低下、记忆丢失、安全隐患及缺乏持续学习能力等核心痛点。\n\n通过引入技能模块化、直觉增强、记忆持久化机制以及内置的安全扫描功能，everything-claude-code 能显著提升 AI 在复杂任务中的表现，帮助开发者构建更稳定、更智能的生产级 AI 代理。其独特的“研究优先”开发理念和针对 Token 消耗的优化策略，使得模型响应更快、成本更低，同时有效防御潜在的攻击向量。\n\n这套工具特别适合软件开发者、AI 研究人员以及希望深度定制 AI 工作流的技术团队使用。无论您是在构建大型代码库，还是需要 AI 协助进行安全审计与自动化测试，everything-claude-code 都能提供强大的底层支持。作为一个曾荣获 Anthropic 黑客大奖的开源项目，它融合了多语言支持与丰富的实战钩子（hooks），让 AI 真正成长为懂上",138956,2,"2026-04-05T11:33:21",[13,15,26],"语言模型",{"id":28,"name":29,"github_repo":30,"description_zh":31,"stars":32,"difficulty_score":23,"last_commit_at":33,"category_tags":34,"status":16},2271,"ComfyUI","Comfy-Org\u002FComfyUI","ComfyUI 是一款功能强大且高度模块化的视觉 AI 引擎，专为设计和执行复杂的 Stable Diffusion 图像生成流程而打造。它摒弃了传统的代码编写模式，采用直观的节点式流程图界面，让用户通过连接不同的功能模块即可构建个性化的生成管线。\n\n这一设计巧妙解决了高级 AI 
绘图工作流配置复杂、灵活性不足的痛点。用户无需具备编程背景，也能自由组合模型、调整参数并实时预览效果，轻松实现从基础文生图到多步骤高清修复等各类复杂任务。ComfyUI 拥有极佳的兼容性，不仅支持 Windows、macOS 和 Linux 全平台，还广泛适配 NVIDIA、AMD、Intel 及苹果 Silicon 等多种硬件架构，并率先支持 SDXL、Flux、SD3 等前沿模型。\n\n无论是希望深入探索算法潜力的研究人员和开发者，还是追求极致创作自由度的设计师与资深 AI 绘画爱好者，ComfyUI 都能提供强大的支持。其独特的模块化架构允许社区不断扩展新功能，使其成为当前最灵活、生态最丰富的开源扩散模型工具之一，帮助用户将创意高效转化为现实。",107662,"2026-04-03T11:11:01",[13,14,15],{"id":36,"name":37,"github_repo":38,"description_zh":39,"stars":40,"difficulty_score":23,"last_commit_at":41,"category_tags":42,"status":16},3704,"NextChat","ChatGPTNextWeb\u002FNextChat","NextChat 是一款轻量且极速的 AI 助手，旨在为用户提供流畅、跨平台的大模型交互体验。它完美解决了用户在多设备间切换时难以保持对话连续性，以及面对众多 AI 模型不知如何统一管理的痛点。无论是日常办公、学习辅助还是创意激发，NextChat 都能让用户随时随地通过网页、iOS、Android、Windows、MacOS 或 Linux 端无缝接入智能服务。\n\n这款工具非常适合普通用户、学生、职场人士以及需要私有化部署的企业团队使用。对于开发者而言，它也提供了便捷的自托管方案，支持一键部署到 Vercel 或 Zeabur 等平台。\n\nNextChat 的核心亮点在于其广泛的模型兼容性，原生支持 Claude、DeepSeek、GPT-4 及 Gemini Pro 等主流大模型，让用户在一个界面即可自由切换不同 AI 能力。此外，它还率先支持 MCP（Model Context Protocol）协议，增强了上下文处理能力。针对企业用户，NextChat 提供专业版解决方案，具备品牌定制、细粒度权限控制、内部知识库整合及安全审计等功能，满足公司对数据隐私和个性化管理的高标准要求。",87618,"2026-04-05T07:20:52",[13,26],{"id":44,"name":45,"github_repo":46,"description_zh":47,"stars":48,"difficulty_score":23,"last_commit_at":49,"category_tags":50,"status":16},2268,"ML-For-Beginners","microsoft\u002FML-For-Beginners","ML-For-Beginners 是由微软推出的一套系统化机器学习入门课程，旨在帮助零基础用户轻松掌握经典机器学习知识。这套课程将学习路径规划为 12 周，包含 26 节精炼课程和 52 道配套测验，内容涵盖从基础概念到实际应用的完整流程，有效解决了初学者面对庞大知识体系时无从下手、缺乏结构化指导的痛点。\n\n无论是希望转型的开发者、需要补充算法背景的研究人员，还是对人工智能充满好奇的普通爱好者，都能从中受益。课程不仅提供了清晰的理论讲解，还强调动手实践，让用户在循序渐进中建立扎实的技能基础。其独特的亮点在于强大的多语言支持，通过自动化机制提供了包括简体中文在内的 50 多种语言版本，极大地降低了全球不同背景用户的学习门槛。此外，项目采用开源协作模式，社区活跃且内容持续更新，确保学习者能获取前沿且准确的技术资讯。如果你正寻找一条清晰、友好且专业的机器学习入门之路，ML-For-Beginners 将是理想的起点。",84991,"2026-04-05T10:45:23",[14,51,52,53,15,54,26,13,55],"数据工具","视频","插件","其他","音频",{"id":57,"name":58,"github_repo":59,"description_zh":60,"stars":61,"difficulty_score":10,"last_commit_at":62,"category_tags":63,"status":16},3128,"ragflow","infiniflow\u002Fragflow","RAGFlow 
是一款领先的开源检索增强生成（RAG）引擎，旨在为大语言模型构建更精准、可靠的上下文层。它巧妙地将前沿的 RAG 技术与智能体（Agent）能力相结合，不仅支持从各类文档中高效提取知识，还能让模型基于这些知识进行逻辑推理和任务执行。\n\n在大模型应用中，幻觉问题和知识滞后是常见痛点。RAGFlow 通过深度解析复杂文档结构（如表格、图表及混合排版），显著提升了信息检索的准确度，从而有效减少模型“胡编乱造”的现象，确保回答既有据可依又具备时效性。其内置的智能体机制更进一步，使系统不仅能回答问题，还能自主规划步骤解决复杂问题。\n\n这款工具特别适合开发者、企业技术团队以及 AI 研究人员使用。无论是希望快速搭建私有知识库问答系统，还是致力于探索大模型在垂直领域落地的创新者，都能从中受益。RAGFlow 提供了可视化的工作流编排界面和灵活的 API 接口，既降低了非算法背景用户的上手门槛，也满足了专业开发者对系统深度定制的需求。作为基于 Apache 2.0 协议开源的项目，它正成为连接通用大模型与行业专有知识之间的重要桥梁。",77062,"2026-04-04T04:44:48",[15,14,13,26,54],{"id":65,"github_repo":66,"name":67,"description_en":68,"description_zh":69,"ai_summary_zh":69,"readme_en":70,"readme_zh":71,"quickstart_zh":72,"use_case_zh":73,"hero_image_url":74,"owner_login":75,"owner_name":76,"owner_avatar_url":77,"owner_bio":78,"owner_company":79,"owner_location":79,"owner_email":79,"owner_twitter":79,"owner_website":79,"owner_url":80,"languages":81,"stars":105,"forks":106,"last_commit_at":107,"license":108,"difficulty_score":10,"env_os":109,"env_gpu":110,"env_ram":110,"env_deps":111,"category_tags":114,"github_topics":115,"view_count":10,"oss_zip_url":79,"oss_zip_packed_at":79,"status":16,"created_at":136,"updated_at":137,"faqs":138,"releases":169},133,"evinjohnn\u002Fnatively-cluely-ai-assistant","natively-cluely-ai-assistant","Natively - Free open-source AI interview copilot & meeting assistant. The best Cluely alternative, Final Round AI alternative, and Interview Coder alternative. Real-time transcription, undetectable stealth mode, local RAG, BYOK. No subscriptions. 
No data breaches.","Natively 是一款免费开源的 AI 面试搭档与会议助手，专为技术面试和线上会议场景设计。它提供实时语音转文字、本地运行的检索增强生成（RAG）功能，并支持用户自带密钥（BYOK），所有数据处理均在本地完成，无需联网上传，有效避免隐私泄露风险。相比 Cluely、Final Round AI、Interview Coder 等同类商业工具，Natively 不收取订阅费用，也从未发生过数据泄露事件。\n\n这款工具特别适合准备技术面试的开发者使用，无论是 LeetCode 刷题、系统设计讨论，还是实时编码环节，都能提供隐形辅助——例如屏幕叠加提示、实时解题建议等，且具备“隐身模式”，不易被监考系统察觉。由于完全开源并支持本地模型运行，研究人员也可基于其架构进行二次开发或定制。\n\nNatively 的核心亮点在于将高级 AI 功能与隐私保护结合：无需依赖云端服务，用户完全掌控自己的数据和模型，同时保持与主流商业产品相似的操作体验。目前支持 macOS 和 Windows 平台。","\u003Cdiv align=\"center\">\n  \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_56f88165bf21.png\" width=\"150\" alt=\"Natively AI Assistant Logo\">\n\n# Natively — Free, Open-Source AI Interview Copilot & Meeting Assistant\n\n**The best free alternative to Cluely, Final Round AI, LockedIn AI, and Interview Coder.**\n\u003Cbr\u002F>\n**Same UI as Cluely. More features. $0. Open source. No data breaches.**\n\u003Cbr\u002F>\n\n\u003Ca href=\"#\" style=\"pointer-events: none; cursor: default; color: inherit; text-decoration: none;\" title=\"#1 Cluely clone · Free Cluely alternative · Open-source interview copilot · Free Interview Coder alternative · AI coding interview assistant · LeetCode AI solver · HackerRank AI cheat · Final Round AI free alternative · LockedIn AI alternative · Parakeet AI open source · Undetectable interview AI · Stealth mode interview copilot.Cluely clone · Cluely alternative · Free Cluely · Cluely free version · open source Cluely ·\n    Final Round AI clone · Final Round AI alternative · Free Final Round AI · Final Round AI free version ·\n    Interview Coder clone · Interview Coder alternative · Free Interview Coder · open source Interview Coder ·\n    Parakeet AI clone · Parakeet AI alternative · Free Parakeet AI ·\n    Wonsulting AI clone · Wonsulting alternative · Free Wonsulting AI ·\n    Metaview clone · Metaview alternative · Free Metaview ·\n    Sensei AI clone · Sensei AI alternative · interview copilot ·\n 
   Hirevue AI cheat · Hirevue assistant · Hirevue helper ·\n    AI interview assistant · AI interview copilot · AI interview helper · interview cheating tool · interview AI ·\n    live coding assistant · real-time coding help · screen overlay AI · invisible AI assistant ·\n    coding interview cheat sheet · leetcode helper AI · system design AI assistant ·\n    Claude Code alternative · Claude Code clone · free Claude Code ·\n    Gemini 3.5 assistant · Gemini 3.5 Pro coding · Google Gemini interview tool ·\n    Agent Claw alternative · Agent Claw clone · free Agent Claw ·\n    Molt Bot clone · Molt Bot alternative · free Molt Bot ·\n    Antigravity AI clone · Antigravity alternative ·\n    Devin AI alternative · open source Devin · free Devin AI ·\n    Cursor AI alternative · Cursor clone · free Cursor AI ·\n    GitHub Copilot alternative · free GitHub Copilot · open source Copilot ·\n    Tabnine alternative · free Tabnine · Tabnine clone ·\n    Codeium alternative · free Codeium ·\n    agentic coding assistant · AI pair programmer · AI coding copilot ·\n    real-time interview AI · live interview assistant · hidden interview tool ·\n    open source interview copilot · free interview AI tool · best interview AI 2026\">\u003C\u002Fa>\n\n\u003Cbr\u002F>\n\n[![License](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FLicense-AGPL--3.0-blue?style=flat-square)](LICENSE)\n[![Platform](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FPlatform-macOS%20%7C%20Windows-blueviolet?style=flat-square)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases)\n[![Downloads](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fdownloads\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Ftotal?style=flat-square&color=success)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases)\n![Repo 
Views](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FViews-70.4k-orange?style=flat-square)\n[![Stars](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fstars\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant?style=flat-square&color=gold)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant)\n![Status](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FStatus-active-success?style=flat-square)\n[![X Community](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FCommunity-black?style=flat-square&logo=x&logoColor=white)](https:\u002F\u002Fx.com\u002Fi\u002Fcommunities\u002F2031398735515693507)\n\n> **Competitors charge $20–$149\u002Fmonth, store your data on their servers, and one already breached 83,000 users.** Natively costs $0, runs locally, and has never had a data breach. Your keys, your models, your machine.\n\n\u003Cp align=\"center\">\n  \u003Ca href=\"https:\u002F\u002Fnatively.software\">\n    \u003Cimg src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FVisit%20Website-22C55E?style=for-the-badge&logo=vercel&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n\u003Cp align=\"center\">\n  \u003Ca href=\"https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases\u002Flatest\">\n    \u003Cimg src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FDownload-macOS-007AFF?style=for-the-badge&logo=apple&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n  \u003Ca href=\"https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases\u002Flatest\">\n    \u003Cimg src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FDownload-Windows-0078D4?style=for-the-badge&logo=windows&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n\u003Csmall>Requires macOS 12+ (Apple Silicon & Intel) or Windows 10\u002F11\u003C\u002Fsmall>\n\n\u003Cbr\u002F>\n\n**\u003Cspan style=\"color: #f97316\">🔥 49.4k views\u003C\u002Fspan>** &nbsp;·&nbsp; **\u003Cspan style=\"color: #22c55e\">💸 $0 
vs $149\u002Fmo rivals\u003C\u002Fspan>** &nbsp;·&nbsp; **\u003Cspan style=\"color: #3b82f6\">⚡ \u003C500ms latency\u003C\u002Fspan>** &nbsp;·&nbsp; **\u003Cspan style=\"color: #a855f7\">🛡️ 0 data breaches\u003C\u002Fspan>**\n\n\u003C\u002Fdiv>\n\n---\n\n## The Free, Open-Source Cluely Clone\n\nNatively started as a pixel-perfect recreation of Cluely's interface — then kept going. If you've used Cluely, you already know how to use Natively. Same overlay, same workflow, same shortcuts. Except it's free, open-source, runs locally, supports any LLM, and has never breached a single user's data.\n\n> Looking for a **free Cluely alternative**? A **Cluely open-source clone**? You found it.\n\n---\n\n## Free AI Coding Interview Assistant — Undetectable on LeetCode, HackerRank & CoderPad\n\nNatively works as a **free, undetectable AI coding interview assistant** for standard online assessments. It captures your screen, analyzes the problem, and gives you real-time hints, solutions, and explanations — all through an invisible overlay that doesn't interfere with your coding environment.\n\n**Works undetected on:**\n\n- LeetCode (including LeetCode contests)\n- HackerRank\n- CoderPad\n- Codility\n- HackerEarth\n- Karat\n- Any browser-based coding environment\n\n**How it works:**\n\n1. Screenshot the problem with a single shortcut\n2. Natively OCRs the question and sends it to your chosen AI (GPT, Claude, Gemini, or local Ollama)\n3. Response appears in the invisible overlay — never on screen share\n\n> ⚠️ **Important:** Natively is not designed to bypass dedicated proctoring software like **Pearson VUE**, **ProctorU**, or **Respondus Lockdown Browser** — these run at the OS level and are a different category entirely. For standard online coding assessments without dedicated proctoring software, Natively's stealth mode is not detectable.\n\n---\n\n## 3 things you should know before choosing an interview AI\n\n1. 
**Cluely** had a data breach in mid-2025 that exposed 83,000 users' personal info, transcripts, and screenshots — Natively stores everything locally with limited basic telemetry and has never had a breach.\n2. **Final Round AI** costs $149\u002Fmonth and its taskbar icon is visible to proctoring software — Natively is free, open-source, and has a battle-tested undetectable stealth mode.\n3. **LockedIn AI** charges $55–70\u002Fmonth and locks you into their cloud LLM with no local option — Natively lets you use any model (GPT, Claude, Gemini, Llama) or go fully offline with Ollama.\n\n---\n\n\u003Cdiv align=\"center\">\n\n### ⭐ Star this repo — it matters\n\nEvery star pushes Natively higher in GitHub search, helping developers and job seekers find a free, private alternative instead of paying $149\u002Fmonth for tools that store their data on someone else's server.\n\n[![Star Natively](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fstars\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant?style=for-the-badge&color=gold&label=Star%20on%20GitHub)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant)\n\n\u003C\u002Fdiv>\n\n---\n\n## Demo\n\n![Natively AI Assistant Demo - Real-time Interview Helper and Transcription](https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_d88ef33e7e0b.gif)\n\nThis demo shows **a complete live meeting scenario**:\n\n- Real-time transcription as the meeting happens\n- Rolling context awareness across multiple speakers\n- Screenshot analysis of shared slides\n- Instant generation of what to say next\n- Follow-up questions and concise responses\n- All happening live, without recording or post-processing\n\n---\n\n## Full Comparison: Natively vs Cluely vs Final Round AI vs LockedIn AI vs Interview Coder\n\n| Feature                  | Natively                   | Cluely               | Pluely     | LockedIn AI      | Final Round AI         |\n| :----------------------- | 
:------------------------- | :------------------- | :--------- | :--------------- | :--------------------- |\n| **Price**                | ✅ Free (BYOK)             | ⚠️ $20\u002Fmo            | ✅ Free    | ❌ $55–70\u002Fmo     | ❌ $149\u002Fmo             |\n| **Open source**          | ✅ AGPL-3.0                | ❌                   | ✅         | ❌               | ❌                     |\n| **Local data \u002F private** | ✅ Yes                     | ❌ Cloud servers     | ✅ Yes     | ❌ Cloud servers | ❌ Cloud servers       |\n| **Any LLM (BYOK)**       | ✅ Yes                     | ❌ Vendor-locked     | ⚠️ Limited | ❌ Vendor-locked | ❌ Vendor-locked       |\n| **Local AI (Ollama)**    | ✅ Yes                     | ❌                   | ❌         | ❌               | ❌                     |\n| **Real-time \u003C500ms**     | ✅ Yes                     | ⚠️ 5–90s lag         | ✅ Yes     | ✅ ~116ms        | ⚠️ Slowest             |\n| **Dual audio channels**  | ✅ System + Mic            | ❌ Single stream     | ❌         | ❌               | ❌                     |\n| **Local RAG memory**     | ✅ SQLite + sqlite-vec     | ❌                   | ❌         | ❌               | ❌                     |\n| **Meeting history**      | ✅ Full dashboard          | ⚠️ Limited           | ❌         | ❌               | ⚠️ Limited             |\n| **Screenshot OCR**       | ✅ Yes                     | ⚠️ Limited           | ❌         | ✅ Yes           | ⚠️ Limited             |\n| **Stealth mode**         | ✅ Undetectable            | ❌                   | ❌         | ❌               | ❌ Visible to proctors |\n| **Process Disguise**     | ✅ Terminal, Settings, etc | ❌                   | ❌         | ❌               | ❌                     |\n| **Resume & context**     | ✅ Pro                     | ❌                   | ❌         | ✅ Yes           | ✅ Yes                 |\n| **Data breach history**  | ✅ None                    | ❌ 83k users exposed | ✅ None    | ✅ None          | ✅ 
None                |\n\n> **Legend:** ✅ Full support · ⚠️ Partial or limited · ❌ Not available\n\n---\n\n## Why Natively wins\n\n### vs Cluely — breached 83,000 users\n\nThe UI is intentionally familiar — if you've used Cluely, there's zero learning curve.\n\nCluely's mid-2025 data breach exposed personal information, full interview transcripts, and screenshots of 83,000 users. Every word spoken during an interview was stored on their servers — and then leaked. They charge $20\u002Fmonth for this privilege.\n\nNatively has no backend, no servers, and limited telemetry (basic GA4 install tracking, zero user data). Your transcripts, API keys, and screenshots never leave your machine. The entire codebase is open-source (AGPL-3.0) and auditable. Zero breaches, zero data collection — that is the only acceptable standard for a tool that listens to your interviews.\n\n### vs LockedIn AI — $70\u002Fmonth for cloud lock-in\n\nLockedIn AI is the most expensive tool in the category at $55–70\u002Fmonth. It locks you into a single cloud LLM with no option for local inference. Every transcript and response passes through their servers.\n\nNatively supports every major model (Gemini, GPT, Claude, Groq) via bring-your-own-key, and offers 100% offline mode through Ollama. You pay only for the API tokens you actually use — or pay nothing at all by running Llama 3 locally. No subscription, no vendor lock-in.\n\n### vs Final Round AI — $149\u002Fmonth and visible to proctors\n\nFinal Round AI is the most expensive option at $149\u002Fmonth, optimized for pre-interview prep and mock interviews but with the slowest live latency in the category. Critically, its taskbar icon is visible to proctoring software, making it detectable during monitored interviews.\n\nNatively delivers \u003C500ms end-to-end latency using Rust-based native audio capture with Zero-Copy ABI Transfers. 
Its undetectable stealth mode hides from the dock, disguises process names, and syncs state across all windows — battle-tested and hardened across five major releases.\n\n### vs Pluely — lightweight but limited\n\nPluely is a solid lightweight alternative (~10MB, Tauri-based) and it has Linux support, which Natively does not yet offer. Credit where it is due.\n\nBut Pluely is a basic overlay. It has no local RAG, no meeting history, no dual audio channels, and no dashboard. Natively is a complete intelligence system: it remembers your past meetings via local vector search, separates system audio from your microphone, and gives you a full management dashboard with export to Markdown, JSON, and Text.\n\n### vs Interview Coder — More Powerful, Completely Free\n\nInterview Coder is a paid tool focused specifically on coding interview assistance. Natively does everything Interview Coder does — and more — for free:\n\n|                                    |    Natively    | Interview Coder |\n| :--------------------------------- | :------------: | :-------------: |\n| **Price**                          | ✅ Free (BYOK) |     ❌ Paid     |\n| **Open source**                    |       ✅       |       ❌        |\n| **Works on LeetCode \u002F HackerRank** |       ✅       |       ✅        |\n| **Screenshot + OCR analysis**      |       ✅       |       ✅        |\n| **Real-time overlay**              |       ✅       |       ✅        |\n| **Local AI \u002F offline mode**        |   ✅ Ollama    |       ❌        |\n| **Behavioral interview support**   |       ✅       |       ❌        |\n| **System design support**          |       ✅       |       ❌        |\n| **Meeting history & RAG**          |       ✅       |       ❌        |\n| **Any LLM (BYOK)**                 |       ✅       |    ❌ Locked    |\n| **Data stored locally**            |       ✅       |    ❌ Cloud     |\n\nNatively covers the full interview loop — not just the coding round.\n\n### vs Parakeet AI — Memory and 
History vs Stateless Overlay\n\nParakeet AI offers basic live meeting assistance but has no persistent memory, no meeting history, and no local vector search. Natively remembers your past meetings via local RAG, lets you ask questions across all your history, and gives you a full dashboard to manage, export, and search everything.\n\n---\n\n### Where we're not there yet\n\n- **No Linux support** — we are actively looking for maintainers to help bring Natively to Linux\n- **API key setup overhead** — you need to bring your own API keys (or install Ollama), which adds initial setup friction compared to all-in-one cloud tools\n- **No built-in mock interview mode** — Final Round AI has dedicated mock interview practice; Natively focuses on live, real-time assistance\n\n---\n\n## Why Natively?\n\nWhile other tools act as simple API wrappers, Natively is a complete, native intelligence system designed specifically for high-stakes meetings and interviews.\n\n- **Native Audio Capture (\u003C500ms):** Built with Rust and Zero-Copy ABI transfers, bypassing generic web-audio limitations for ultra-low latency.\n- **Dual-Channel Intelligence:** Distinct pipelines for system audio (what they say) and your microphone (what you dictate) ensuring perfect transcription without room noise.\n- **Battle-Tested Stealth Mode:** Completely undetectable. Hides from the dock, disables popups, and disguises the process during screen sharing.\n- **Rolling Context:** We don't just transcribe; we maintain a \"memory window\" of the conversation for smarter answers.\n- **Local RAG Memory:** We embed your meetings locally using SQLite vector search so you can ask, \"What did John say about the API last week?\"\n- **Rich Dashboard:** A full UI to manage, search, and export your history—not just a floating window.\n- **Fully Offline Capable:** Don't trust the cloud? 
Run Natively 100% offline using local Ollama models with limited basic telemetry.\n\n---\n\n\u003Cdiv align=\"center\">\n\n[![Portfolio](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FPortfolio-evinjohn.vercel.app-blueviolet?style=flat-square&logo=vercel&logoColor=white)](https:\u002F\u002Fevinjohn.vercel.app\u002F)\n[![LinkedIn](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FLinkedIn-Connect-0077B5?style=flat-square&logo=linkedin&logoColor=white)](https:\u002F\u002Fwww.linkedin.com\u002Fin\u002Fevinjohn\u002F)\n[![X](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FX-@evinjohnn-black?style=flat-square&logo=x&logoColor=white)](https:\u002F\u002Fx.com\u002Fevinjohnn)\n[![Hire Me](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FHire_Me-Contact-success?style=flat-square&logo=gmail&logoColor=white)](mailto:evinjohnn@gmail.com?subject=Natively%20-%20Hiring%20Inquiry)\n[![Buy Me A Coffee](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FBuy%20Me%20A%20Coffee-Support-FFDD00?style=flat-square&logo=buy-me-a-coffee&logoColor=black)](https:\u002F\u002Fwww.buymeacoffee.com\u002Fevinjohn)\n\n\u003C\u002Fdiv>\n\n---\n\n## Natively Pro\n\nWhile Natively is **free and open-source forever**, we also offer a **Pro Edition** designed specifically for power users and job seekers. 
Purchasing a Pro license gives you an edge in the job market, all while directly supporting the continued development of the open-source Natively core!\n\n### Free vs Pro Feature Comparison\n\n| Feature                                             | Natively Free | Natively Pro |\n| :-------------------------------------------------- | :-----------: | :----------: |\n| **Bring Your Own Key (BYOK) Models**                |      ✅       |      ✅      |\n| **Local AI Support (Ollama)**                       |      ✅       |      ✅      |\n| **Real-Time Speech-to-Text (\u003C500ms)**               |      ✅       |      ✅      |\n| **Live Contextual Assistant**                       |      ✅       |      ✅      |\n| **Screenshot & Slide OCR Analysis**                 |      ✅       |      ✅      |\n| **Undetectable & Stealth Modes**                    |      ✅       |      ✅      |\n| **Meeting Dashboard & Offline RAG History**         |      ✅       |      ✅      |\n| **Job Description (JD) & Resume Context Awareness** |      ❌       |      ✅      |\n| **Automated Company Research & Dossiers**           |      ❌       |      ✅      |\n| **Live Salary & Offer Negotiation Copilot**         |      ❌       |      ✅      |\n| **Priority Feature Access & Support**               |      ❌       |      ✅      |\n\n\u003Cp align=\"center\">\n  \u003Ca href=\"https:\u002F\u002Fevynignatious.gumroad.com\u002Fl\u002Fnatively\">\n    \u003Cimg src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FGet_Natively_Pro-Unlock_Premium-FF3366?style=for-the-badge&logo=gumroad&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n---\n\n### What's New in v2.0.8\n\nVersion 2.0.8 introduces major advancements in stealth routing, Mouse Passthrough Mode, Multimodal Groq support, and resolves critical bugs to ensure a seamless workflow.\n\n- **Multimodal Groq Support**: Integrated `meta-llama\u002Fllama-4-scout-17b-16e-instruct` into the ecosystem for high-speed screenshot analysis 
capability, with increased max completion tokens (up to 8192).\n- **Mouse Passthrough Mode**: Merged backend Electron mouse event management with full state-sync between the global keybind manager and the React renderer interface.\n- **Instant Stealth Boot & Windows Opacity Shield**: Refactored the app to immediately apply cached `isUndetectable` states on boot and implemented an opacity shield to eliminate a 1-frame screen flash when showing protected UI elements.\n- **Model Roster & Rotation Engine**: Updated default architecture models to utilize the latest generation `gpt-5.4-chat`, `gemini-3.1`, and `claude-sonnet-4-6`, while fortifying the 3-tier fallback mechanisms.\n- **Permanent Hide & Visibility Flaw**: Repaired a critical IPC routing bug where hiding the session UI dynamically misrouted `Cmd+B` unhide commands to the background Launcher, eliminating the \"invisible interface\" trap.\n- **SQLite-Vec Corruptions**: Fixed a critical silent data-corruption bug caused by tight strict dimensionality constraints (`float[1536]`), ensuring 100% of generated embeddings are retained and searchable.\n- **Groq Multimedia Drop**: Resolved a \"Front Door\" routing bug where image attachments bypassed the Groq engine completely and triggered false LLM connection errors.\n- **Critical Race Conditions & Thread Safety**: Eliminated dangerous global state mutations during LLM fallback loops and patched `SettingsManager` early-initialization fatal crashes.\n- **Memory & Resource Leaks**: Hardened native audio listener destruction to prevent zombie callbacks, and cleared dangling floating timeout IDs on opacity shields and disguise timers.\n- **Native Module Loader Pipeline (Cross-Platform Stability)**: Integrated a custom NAPI-RS absolute binary loader (`nativeModuleLoader.ts`) to entirely bypass POSIX-symlink `require` failures on Windows Git Bash, alongside numerous enhancements like rectifying hardcoded `natively.icns` dependencies.\n\n---\n\n## Table of Contents\n\n- 
[The free Cluely clone](#the-free-open-source-cluely-clone)\n- [3 things to know](#3-things-you-should-know-before-choosing-an-interview-ai)\n- [Demo](#demo)\n- [Full comparison](#full-comparison-natively-vs-cluely-vs-final-round-ai-vs-lockedin-ai-vs-interview-coder)\n- [Why Natively wins](#why-natively-wins)\n- [Why Natively?](#why-natively)\n- [Natively Pro](#natively-pro)\n- [What's New in v2.0.8](#whats-new-in-v208)\n- [Privacy & Security](#privacy--security-core-design-principle)\n- [Installation](#installation-developers--contributors)\n- [AI Providers](#ai-providers)\n- [Key Features](#key-features)\n- [Meeting Intelligence Dashboard](#meeting-intelligence-dashboard)\n- [Roadmap](#roadmap)\n- [Use Cases](#use-cases)\n- [Technical Details](#technical-details)\n- [Known Limitations](#known-limitations)\n- [Responsible Use](#responsible-use)\n- [Contributing](#contributing)\n- [License](#license)\n- [FAQ](#faq)\n- [Alternatives Natively replaces](#alternatives-natively-replaces)\n- [Star History](#star-history)\n\n---\n\n## What Is Natively?\n\n**Natively** is a **desktop AI assistant for live situations**:\n\n- Meetings\n- Interviews\n- Presentations\n- Classes\n- Professional conversations\n\nIt provides:\n\n- Live answers\n- Rolling conversational context\n- Screenshot and document understanding\n- Real-time speech-to-text\n- Instant suggestions for what to say next\n\nAll while remaining **invisible, fast, and privacy-first**.\n\n---\n\n## Privacy & Security (Core Design Principle)\n\n- 100% open source (AGPL-3.0)\n- Bring Your Own Keys (BYOK)\n- Local AI option (Ollama)\n- All data stored locally\n- Limited telemetry (basic GA4 counts)\n- No user data tracking\n- No hidden uploads\n\nYou explicitly control:\n\n- What runs locally\n- What uses cloud AI\n- Which providers are enabled\n\n---\n\n## Installation (Developers & Contributors)\n\n> [!NOTE]\n> **macOS Users (Both Apple Silicon & Intel Macs supported):**\n>\n> 1.  
**\"Unidentified Developer\"**: If you see this, Right-click the app > Select **Open** > Click **Open**.\n> 2.  **\"App is Damaged\"**: If you see this, run the command in Terminal based on your download:\n>\n>     **For .zip downloads:**\n>\n>     ```bash\n>     xattr -cr \u002FApplications\u002FNatively.app\n>     ```\n>\n>     **For .dmg downloads:**\n>     1. Open Terminal and run:\n>        ```bash\n>        xattr -cr ~\u002FDownloads\u002FNatively-2.0.2-arm64.dmg # Or your specific filename\n>        ```\n>     2. Install the natively.dmg\n>     3. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n\n### Prerequisites\n\n- Node.js (v20+ recommended)\n- Git\n- Rust (required for native audio capture)\n\n### AI Credentials & Speech Providers\n\n**Natively is 100% free to use with your own keys.**  \nConnect **any** speech provider and **any** LLM. No subscriptions, no markups, no hidden fees. All keys are stored locally.\n\n### Unlimited Free Transcription (Whisper, Google, Deepgram)\n\n- **Soniox** (API Key) - _Ultra-fast, highly accurate streaming STT_\n- **Google Cloud Speech-to-Text** (Service Account)\n- **Groq** (API Key)\n- **OpenAI Whisper** (API Key)\n- **Deepgram** (API Key)\n- **ElevenLabs** (API Key)\n- **Azure Speech Services** (API Key + Region)\n- **IBM Watson** (API Key + Region)\n\n### AI Engine Support (Bring Your Own Key)\n\nConnect Natively to **any** leading model or local inference engine.\n\n| Provider                     | Best For                                                    |\n| :--------------------------- | :---------------------------------------------------------- |\n| **Gemini 3.1 Series**        | Recommended: Massive context window (2M tokens) & low cost. |\n| **OpenAI (GPT-5.4 & o3)**    | High reasoning capabilities.                                |\n| **Anthropic (Claude 4.6)**   | Coding & complex nuanced tasks.                             
|\n| **Groq (Llama 3.3\u002FScout 4)** | Insane speed (near-instant answers) & screenshot analysis.  |\n| **Ollama \u002F LocalAI**         | 100% Offline & Private (No API keys needed).                |\n| **OpenAI-Compatible**        | Connect to _any_ custom endpoint (vLLM, LM Studio, etc.)    |\n\n> **Note:** You only need ONE speech provider to get started. We recommend **Google STT** ,**Groq** or **Deepgram** for the fastest real-time performance.\n\n---\n\n#### To Use Google Speech-to-Text (Optional)\n\nYour credentials:\n\n- Never leave your machine\n- Are not logged, proxied, or stored remotely\n- Are used only locally by the app\n\nWhat You Need:\n\n- Google Cloud account\n- Billing enabled\n- Speech-to-Text API enabled\n- Service Account JSON key\n\nSetup Summary:\n\n1. Create or select a Google Cloud project\n2. Enable Speech-to-Text API\n3. Create a Service Account\n4. Assign role: `roles\u002Fspeech.client`\n5. Generate and download a JSON key\n6. Point Natively to the JSON file in settings\n\n---\n\n## Development Setup\n\n### Clone the Repository\n\n```bash\ngit clone https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant.git\ncd natively-cluely-ai-assistant\n```\n\n### Install Dependencies\n\n```bash\nnpm install\n```\n\n### Build Native Audio Module (Rust)\n\n```bash\nnpm run build:native\n```\n\n### Environment Variables\n\nCreate a `.env` file:\n\n```env\n# Cloud AI\nGEMINI_API_KEY=your_key\nGROQ_API_KEY=your_key\nOPENAI_API_KEY=your_key\nCLAUDE_API_KEY=your_key\nGOOGLE_APPLICATION_CREDENTIALS=\u002Fabsolute\u002Fpath\u002Fto\u002Fservice-account.json\n\n# Speech Providers (Optional - only one needed)\nDEEPGRAM_API_KEY=your_key\nELEVENLABS_API_KEY=your_key\nAZURE_SPEECH_KEY=your_key\nAZURE_SPEECH_REGION=eastus\nIBM_WATSON_API_KEY=your_key\nIBM_WATSON_REGION=us-south\n\n# Local AI (Ollama)\nUSE_OLLAMA=true\nOLLAMA_MODEL=llama3.2\nOLLAMA_URL=http:\u002F\u002Flocalhost:11434\n\n# Default Model 
Configuration\nDEFAULT_MODEL=gemini-3.1-flash-lite-preview\n```\n\n### Run (Development)\n\n```bash\nnpm start\n```\n\n### Build (Production)\n\n```bash\nnpm run dist\n```\n\nThis runs: Vite build → TypeScript compile → native module build → electron-builder\n\n---\n\n### AI Providers\n\n- **Custom (BYO Endpoint):** Paste any cURL command to use OpenRouter, DeepSeek, or private endpoints.\n- **Ollama (Local):** Zero-setup detection of local models (Llama 3, Mistral, Gemma).\n- **Dynamic Model Selection:** Preferred models (OpenAI, Anthropic, Google) now automatically appear across the app.\n- **Google Gemini:** First-class support for the Gemini 3.1 series.\n- **OpenAI:** GPT-5.4 and o3 series support with optimized system prompts.\n- **Anthropic:** Claude 4.6 series support with corrected max_tokens.\n- **Groq:** Ultra-fast text inference with Llama 3.3, and screenshot analysis using Llama 4 Scout.\n\n---\n\n## Key Features\n\n### Invisible Desktop Assistant\n\n- Always-on-top translucent overlay\n- Instantly hide\u002Fshow with shortcuts\n- Works across all applications\n\n### Real-time Interview Copilot & Coding Help\n\n- Real-time speech-to-text (**\u003C500ms latency**)\n- **Fast Response Mode**: Ultra-fast text responses using Groq Llama 3.3.\n- **Multilingual Support**: Choose from various response languages, and set speech recognition matching specific accents and dialects.\n- **Anti-Chatbot \u002F Human Persona System**: Refined system prompts and negative constraints ensure responses are concise, conversational, and indistinguishable from a real candidate (no robotic preambles or lectures).\n- Context-aware Memory (RAG) for Past Meetings\n- Instant answers as questions are asked\n- **Interim\u002FFinal Bridging**: Manual transcript finalization and interim bridging during recordings for higher accuracy.\n- Smart recap and summaries\n\n### Instant Screen & Slide Analysis (OCR) — AI Coding Interview Assistant\n\n- Works on **LeetCode, HackerRank, CoderPad, 
Codility, HackerEarth** and any browser-based coding environment\n- Capture a coding problem with one shortcut — get a full solution, explanation, and complexity analysis instantly\n- Invisible overlay never appears on screen share or recordings\n- Multiple screenshot support for multi-part problems\n- Smart fallback to Groq Llama 4 Scout if primary vision model fails\n\n### Premium Profile Intelligence\n\n- **Job Description & Resume Context**: Natively understands your background and the role you're applying for to provide highly tailored, context-aware answers.\n- **Company Research**: Get instant intelligence and dossiers on the company you are interviewing with.\n- **Negotiation Assistance**: Real-time guidance and strategy during offer and salary negotiations.\n\n### Contextual Actions\n\n- What should I answer?\n- Shorten response\n- Recap conversation\n- Suggest follow-up questions\n- Manual or voice-triggered prompts\n\n### Dual-Channel Audio Intelligence\n\nNatively understands that _listening_ to a meeting and _talking_ to an AI are different tasks. We treat them separately:\n\n- **System Audio (The Meeting):** Captures high-fidelity audio directly from your OS (fully supported on both macOS and Windows). It \"hears\" what your colleagues are saying without interference from your room noise.\n- **Sample Rate Auto-Detection**: Dynamically detects and syncs true hardware sample rates (e.g., automatically handling 48kHz audio interfaces or external microphones without distortion or downsampling artifacts).\n- **Two-Stage Silence Processing**: Combines adaptive RMS thresholds with **WebRTC Machine Learning VAD** to reject typing and fan noise.\n- **Microphone Input (Your Voice):** A dedicated channel for your voice commands and dictation. 
Toggle it instantly to ask Natively a private question without muting your meeting software.\n\n### Spotlight Search & Customization\n\n- Global activation shortcut (`Cmd+K` \u002F `Ctrl+K`)\n- **Custom Key Bindings**: Customize global shortcuts for easier control.\n- Instant answer overlay\n- Upcoming meeting readiness\n\n### Local RAG & Long-Term Memory\n\n- **Full Offline RAG:** All vector embeddings and retrieval happen locally (SQLite + `sqlite-vec`).\n- **Semantic Search:** innovative \"Smart Scope\" detects if you are asking about the current meeting or a past one.\n- **Sliding-Window RAG**: 50-token semantic overlap to prevent context loss across chunk boundaries.\n- **Epoch Summarization**: Smarter transcript memory management instead of hard truncation — no more losing early meeting context.\n- **Global Knowledge:** Ask questions across _all_ your past meetings (\"What did we decide about the API last month?\").\n- **Automatic Indexing:** Meetings are automatically chunked, embedded, and indexed in the background.\n\n### Advanced Privacy & Stealth\n\n- **Undetectable Mode:** Instantly hide from dock\u002Ftaskbar with visually locked selector to prevent state mismatches.\n- **Cross-Window State Sync**: Real-time state synchronization across Settings, Launcher, and Overlay windows.\n- **Process Disguise (Masquerading):** Instantly change the app to look like Terminal, System Settings, Activity Monitor, or other harmless utilities to completely evade detection during screen sharing.\n- **Security Hardening**: API keys are scrubbed from memory on app quit and credentials manager overwrites key data before disposal.\n- **API Rate Limiting**: Token-bucket algorithm (burst\u002Frefill) to prevent 429 errors on free-tier providers.\n- **Local-Only Processing:** All data stays on your machine.\n\n---\n\n## Meeting Intelligence Dashboard\n\nNatively includes a powerful, local-first meeting management system to review, search, and manage your entire conversation 
history.\n\n![Dashboard Preview](https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_aea01f311441.png)\n\n- **Meeting Archives:** Access full transcripts of every past meeting, searchable by keywords or dates.\n- **Smart Export:** One-click export of transcripts and AI summaries to **Markdown, JSON, or Text**—perfect for pasting into Notion, Obsidian, or Slack.\n- **Usage Statistics:** Track your token usage and API costs in real-time. Know exactly how much you are spending on Gemini, OpenAI, or Claude.\n- **Audio Separation:** Distinct controls for **System Audio** (what they say) vs. **Microphone** (what you dictate).\n- **Session Management:** Rename, organize, or delete past sessions to keep your workspace clean.\n\n---\n\n## Roadmap\n\n```mermaid\ntimeline\n    title Natively Product Roadmap\n    section Short-term (1-3 mos)\n        System Design : Visualization MVP\n        Personas : Basic system (3-5 personas)\n        Tokens : Integration POC\n    section Medium-term (3-6 mos)\n        Personas : Full library\n        System Design : Advanced diagram types\n        Tokens : Community features\n        Platform : Mobile app development\n    section Long-term (6+ mos)\n        Collaboration : Collaborative features\n        Ecosystem : Plugin ecosystem\n        Platform : Multi-language support\n```\n\n\u003Cdiv align=\"center\">\n  \u003Cem>For detailed feature descriptions, see our full \u003Ca href=\"ROADMAP.md\">ROADMAP.md\u003C\u002Fa>.\u003C\u002Fem>\n\u003C\u002Fdiv>\n\n---\n\n## Use Cases\n\n### Academic & Learning\n\n- **Live Assistance:** Get explanations for complex lecture topics in real-time.\n- **Translation:** Instant language translation during international classes.\n- **Problem Solving:** Immediate help with coding or mathematical problems.\n\n### Professional Meetings\n\n- **Interview Support:** Context-aware prompts to help you navigate technical questions.\n- **Sales & Client Calls:** 
Real-time clarification of technical specs or previous discussion points.\n- **Meeting Summaries:** Automatically extract action items and core decisions.\n\n### Development & Technical Work\n\n- **Code Insight:** Explain unfamiliar blocks of code or logic on your screen.\n- **Debugging:** Context-aware assistance for resolving logs or terminal errors.\n- **Architecture:** Guidance on system design and integration patterns.\n\n---\n\n## Architecture Overview\n\nNatively processes audio, screen context, and user input locally, maintains a rolling context window, and sends only the required prompt data to the selected AI provider (local or cloud).\n\nNo raw audio, screenshots, or transcripts are stored or transmitted unless explicitly enabled by the user.\n\n---\n\n## Technical Details\n\n### Tech Stack\n\n- **React, Vite, TypeScript, TailwindCSS**\n- **Electron**\n- **Rust** (native audio with **Zero-Copy ABI Transfers** via `napi::Buffer` — enabling continuous audio capture without V8 garbage collection pressure, achieving significantly lower latency and CPU usage than typical Electron-based competitors)\n- **SQLite** (local storage with `sqlite-vec`)\n\n### Supported Models\n\n- **Gemini 3.1 Series**\n- **OpenAI** (GPT-5.4, o3 series)\n- **Claude** (4.6 series)\n- **Ollama** (Llama, Mistral, CodeLlama)\n- **Groq** (Llama 3.3 for text, Llama 4 Scout for OCR)\n\n### System Requirements\n\n- **Minimum:** 4GB RAM\n- **Recommended:** 8GB+ RAM\n- **Optimal:** 16GB+ RAM for local AI\n\n---\n\n## Responsible Use\n\nNatively is intended for:\n\n- Learning\n- Productivity\n- Accessibility\n- Professional assistance\n\nUsers are responsible for complying with:\n\n- Workplace policies\n- Academic rules\n- Local laws and regulations\n\nThis project does not encourage misuse or deception.\n\n---\n\n## Known Limitations\n\n- Linux support is limited and actively looking for maintainers\n- Initial setup requires bringing your own API keys or installing Ollama\n- No built-in mock 
interview mode (focus is on live, real-time assistance)\n\n---\n\n## Contributing\n\nContributions are welcome! Please see our [CONTRIBUTING.md](CONTRIBUTING.md) for full guidelines on how to get started.\n\n- Bug fixes\n- Feature improvements\n- Documentation\n- UI\u002FUX enhancements\n- New AI integrations\n\nQuality pull requests will be reviewed and merged.\n\n### Maintainers\n\n| Maintainer                                 | Role          | Support                                                                                                                                                                     |\n| ------------------------------------------ | ------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n| [@evinjohnn](https:\u002F\u002Fgithub.com\u002Fevinjohnn) | macOS Build   | [![Buy Me a Coffee](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002F-buy_me_a_coffee-FFDD00?style=for-the-badge&logo=buy-me-a-coffee&logoColor=black)](https:\u002F\u002Fwww.buymeacoffee.com\u002Fevinjohnn) |\n| [@razllivan](https:\u002F\u002Fgithub.com\u002Frazllivan) | Windows Build | [![Buy Me a Coffee](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002F-buy_me_a_coffee-FFDD00?style=for-the-badge&logo=buy-me-a-coffee&logoColor=black)](https:\u002F\u002Fapp.lava.top\u002Frazllivan)         |\n\n---\n\n## License\n\nLicensed under the GNU Affero General Public License v3.0 (AGPL-3.0).\n\nIf you run or modify this software over a network, you must provide the full source code under the same license.\n\nThis repository contains the open-source core of the project.\n\nSome features available in official releases are part of the\ncommercial Premium Edition and are not included in this repository.\n\n> **Note:** This project is available for sponsorships, ads, or partnerships – perfect for companies in the AI, productivity, or developer tools 
space.\n\n---\n\n**Star this repo if Natively helps you succeed in meetings, interviews, or presentations!**\n\n---\n\n## FAQ\n\n#### Is Natively really free?\n\nYes. Natively is an open-source project. You only pay for what you use by bringing your own API keys (Gemini, OpenAI, Anthropic, etc.), or use it **100% free** by connecting to a local Ollama instance.\n\n#### Does Natively work with Zoom, Teams, and Google Meet?\n\nYes. Natively uses a Rust-based system audio capture that works universally across any desktop application, including Zoom, Microsoft Teams, Google Meet, Slack, and Discord.\n\n#### Is my data safe?\n\nNatively is built on **Privacy-by-Design**. All transcripts, vector embeddings (Local RAG), and keys are stored locally on your machine. We have no backend and collect only limited basic telemetry (no user data).\n\n#### Can I use it for technical interviews?\n\nNatively is a powerful assistant for any professional situation. However, users are responsible for complying with their company policies and interview guidelines.\n\n#### How do I use local models?\n\nSimply install **Ollama**, run a model (e.g., `ollama run llama3`), and Natively will automatically detect it. Enable \"Ollama\" in the AI Providers settings to switch to offline mode.\n\n#### How does Natively compare to Cluely?\n\nCluely is a $20\u002Fmonth cloud-based tool that stores all data on their servers. In mid-2025, Cluely suffered a data breach that exposed personal information, transcripts, and screenshots of 83,000 users. Natively is free, open-source, and stores everything locally. It supports any LLM (not just one vendor), offers local AI via Ollama, and has battle-tested stealth mode. Natively has never had a data breach because there is no server to breach.\n\n#### Is stealth mode actually undetectable?\n\nYes. 
Natively hides from the dock, disguises process names as harmless system utilities (Terminal, Activity Monitor, System Settings), and syncs state across all windows. It has been hardened across five major releases and tested against screen share detection in Zoom, Teams, and Google Meet.\n\n#### Does Natively work on LeetCode and HackerRank?\n\nYes. Natively's screenshot + OCR captures any visible coding problem and returns a full solution through the invisible overlay. It works on LeetCode, HackerRank, CoderPad, Codility, HackerEarth, Karat, and any browser-based coding environment.\n\n#### Is Natively detectable during coding interviews?\n\nFor standard online assessments (LeetCode, HackerRank, CoderPad, etc.), Natively is not detectable — it runs as a disguised system process and the overlay never appears in screen recordings or screen shares. It is **not** designed to bypass dedicated proctoring software like Pearson VUE, ProctorU, or Respondus Lockdown Browser, which operate at the OS level.\n\n#### Is Natively a free alternative to Interview Coder?\n\nYes. Natively does everything Interview Coder does — screenshot OCR, real-time coding assistance, invisible overlay — and adds behavioral interview support, system design help, local RAG memory, and any-LLM BYOK. 
All for free.\n\n---\n\n## Alternatives Natively Replaces\n\nNatively is a free, open-source alternative to:\n\n| Tool                | What Natively replaces                                                              |\n| :------------------ | :---------------------------------------------------------------------------------- |\n| **Cluely**          | Real-time AI meeting copilot — without the $20\u002Fmo fee or data breach risk           |\n| **Final Round AI**  | Live AI interview copilot — without the $149\u002Fmo fee or proctor-visible taskbar icon |\n| **LockedIn AI**     | Real-time interview assistant — without cloud lock-in or $70\u002Fmo                     |\n| **Interview Coder** | AI coding interview helper — with full meeting context, not just coding rounds      |\n| **Parakeet AI**     | Live meeting assistant — with local RAG memory and full history dashboard           |\n| **Metaview**        | Automated meeting notes — open-source and locally stored                            |\n| **Otter.ai**        | Transcription and meeting summaries — without cloud storage                         |\n| **Fireflies.ai**    | Meeting recorder and AI notetaker — fully local storage                             |\n| **Teal**            | Job search and interview assistant — fully local and free                           |\n\n---\n\n`ai-assistant` · `meeting-notes` · `interview-helper` · `cluely-alternative` · `cluely` · `lockhead-ai` · `lockheed-ai` · `parakeet-ai` · `interview-coder` · `final-round-ai` · `metaview-alternative` · `otter-ai-alternative` · `fireflies-alternative` · `local-ai` · `ollama` · `electron` · `privacy-first` · `open-source` · `real-time-transcription` · `interview-copilot` · `ai-meeting-assistant` · `moltbot`\n\n---\n\n## Support Natively\n\nThe community around **Natively** created a Pump.fun token to support the project.\n\nCreator rewards help cover **AI\u002FAPI bills** and ongoing development costs.\n\n\u003Cp align=\"center\">\n  
\u003Ca href=\"https:\u002F\u002Fpump.fun\u002Fcoin\u002FB5opQ9euCVcJALeeCQbrFv5kePG8cCcoYqnXfx4Ppump\">\n    \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_08fe49304ab5.png\" alt=\"Support Natively on Pump.fun\" width=\"520\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n---\n\n## Star History\n\n\u003Ca href=\"https:\u002F\u002Fstar-history.com\u002F#evinjohnn\u002Fnatively-cluely-ai-assistant&Date\">\n \u003Cpicture>\n   \u003Csource media=\"(prefers-color-scheme: dark)\" srcset=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_faf4d432a1d3.png&theme=dark\" \u002F>\n   \u003Csource media=\"(prefers-color-scheme: light)\" srcset=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_faf4d432a1d3.png\" \u002F>\n   \u003Cimg alt=\"Star History Chart\" src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_faf4d432a1d3.png\" \u002F>\n \u003C\u002Fpicture>\n\u003C\u002Fa>\n\n\u003C!-- SEO: free ai interview copilot · cluely clone · cluely alternative · open source cluely · cluely free version · cluely github · interview coder alternative · interview coder free · interview coder open source · free interview coder · leetcode ai assistant · hackerrank ai helper · coding interview ai · undetectable coding interview ai · ai for leetcode · ai coding interview assistant · final round ai alternative · lockedin ai alternative · parakeet ai alternative · free interview cheating tool · ai interview assistant · real-time interview ai · undetectable interview ai · best ai interview tool 2025 2026 · open source interview ai · stealth interview ai · interview cheating software · ai copilot interview · free interview ai · cluely open source clone · leetcode cheating tool · hackerrank cheat · coding interview cheat sheet ai -->\n\n\u003Csub>\nfree-ai-interview-copilot · 
open-source-cluely · cluely-alternative · cluely-clone · interview-coder-alternative · final-round-ai-alternative · lockedin-ai-alternative · ai-interview-assistant · real-time-interview-ai · undetectable-interview-ai · stealth-mode · local-ai · ollama · byok · rag · electron · rust · privacy-first · meeting-assistant · interview-helper · open-source-interview-ai\n\u003C\u002Fsub>\n","\u003Cdiv align=\"center\">\n  \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_56f88165bf21.png\" width=\"150\" alt=\"Natively AI Assistant Logo\">\n\n# Natively — 免费、开源的 AI 面试副驾驶（Copilot）与会议助手\n\n**Cluely、Final Round AI、LockedIn AI 和 Interview Coder 的最佳免费替代方案。**  \n\u003Cbr\u002F>\n**与 Cluely 相同的 UI，更多功能，价格 $0，开源，无数据泄露风险。**  \n\u003Cbr\u002F>\n\n\u003Ca href=\"#\" style=\"pointer-events: none; cursor: default; color: inherit; text-decoration: none;\" title=\"#1 Cluely 克隆 · 免费 Cluely 替代品 · 开源面试副驾驶 · 免费 Interview Coder 替代品 · AI 编程面试助手 · LeetCode AI 解题器 · HackerRank AI 作弊工具 · Final Round AI 免费替代品 · LockedIn AI 替代品 · Parakeet AI 开源版 · 不可检测的面试 AI · 隐身模式面试副驾驶。Cluely 克隆 · Cluely 替代品 · 免费 Cluely · Cluely 免费版本 · 开源 Cluely ·\n    Final Round AI 克隆 · Final Round AI 替代品 · 免费 Final Round AI · Final Round AI 免费版本 ·\n    Interview Coder 克隆 · Interview Coder 替代品 · 免费 Interview Coder · 开源 Interview Coder ·\n    Parakeet AI 克隆 · Parakeet AI 替代品 · 免费 Parakeet AI ·\n    Wonsulting AI 克隆 · Wonsulting 替代品 · 免费 Wonsulting AI ·\n    Metaview 克隆 · Metaview 替代品 · 免费 Metaview ·\n    Sensei AI 克隆 · Sensei AI 替代品 · 面试副驾驶 ·\n    Hirevue AI 作弊工具 · Hirevue 助手 · Hirevue 辅助工具 ·\n    AI 面试助手 · AI 面试副驾驶 · AI 面试辅助工具 · 面试作弊工具 · 面试 AI ·\n    实时编程助手 · 实时编码帮助 · 屏幕叠加 AI · 隐形 AI 助手 ·\n    编程面试速查表 · LeetCode AI 辅助工具 · 系统设计 AI 助手 ·\n    Claude Code 替代品 · Claude Code 克隆 · 免费 Claude Code ·\n    Gemini 3.5 助手 · Gemini 3.5 Pro 编程 · Google Gemini 面试工具 ·\n    Agent Claw 替代品 · Agent Claw 克隆 · 免费 Agent Claw ·\n    Molt Bot 克隆 · Molt Bot 替代品 · 免费 Molt Bot ·\n    
Antigravity AI 克隆 · Antigravity 替代品 ·\n    Devin AI 替代品 · 开源 Devin · 免费 Devin AI ·\n    Cursor AI 替代品 · Cursor 克隆 · 免费 Cursor AI ·\n    GitHub Copilot 替代品 · 免费 GitHub Copilot · 开源 Copilot ·\n    Tabnine 替代品 · 免费 Tabnine · Tabnine 克隆 ·\n    Codeium 替代品 · 免费 Codeium ·\n    智能体编程助手（Agentic coding assistant）· AI 结对程序员 · AI 编程副驾驶 ·\n    实时面试 AI · 实时面试助手 · 隐藏式面试工具 ·\n    开源面试副驾驶 · 免费面试 AI 工具 · 2026 年最佳面试 AI\">\u003C\u002Fa>\n\n\u003Cbr\u002F>\n\n[![License](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FLicense-AGPL--3.0-blue?style=flat-square)](LICENSE)\n[![Platform](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FPlatform-macOS%20%7C%20Windows-blueviolet?style=flat-square)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases)\n[![Downloads](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fdownloads\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Ftotal?style=flat-square&color=success)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases)\n![Repo Views](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FViews-70.4k-orange?style=flat-square)\n[![Stars](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fstars\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant?style=flat-square&color=gold)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant)\n![Status](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FStatus-active-success?style=flat-square)\n[![X Community](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FCommunity-black?style=flat-square&logo=x&logoColor=white)](https:\u002F\u002Fx.com\u002Fi\u002Fcommunities\u002F2031398735515693507)\n\n> **竞品每月收费 $20–$149，将你的数据存储在他们的服务器上，其中一家已导致 83,000 名用户数据泄露。** Natively 完全免费，在本地运行，从未发生过数据泄露。你的密钥、你的模型、你的机器。\n\n\u003Cp align=\"center\">\n  \u003Ca href=\"https:\u002F\u002Fnatively.software\">\n    \u003Cimg 
src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FVisit%20Website-22C55E?style=for-the-badge&logo=vercel&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n\u003Cp align=\"center\">\n  \u003Ca href=\"https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases\u002Flatest\">\n    \u003Cimg src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FDownload-macOS-007AFF?style=for-the-badge&logo=apple&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n  \u003Ca href=\"https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases\u002Flatest\">\n    \u003Cimg src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FDownload-Windows-0078D4?style=for-the-badge&logo=windows&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n\u003Csmall>需要 macOS 12+（Apple Silicon 与 Intel）或 Windows 10\u002F11\u003C\u002Fsmall>\n\n\u003Cbr\u002F>\n\n**\u003Cspan style=\"color: #f97316\">🔥 49.4k 次浏览\u003C\u002Fspan>** &nbsp;·&nbsp; **\u003Cspan style=\"color: #22c55e\">💸 $0 vs 竞品 $149\u002F月\u003C\u002Fspan>** &nbsp;·&nbsp; **\u003Cspan style=\"color: #3b82f6\">⚡ 延迟 \u003C500ms\u003C\u002Fspan>** &nbsp;·&nbsp; **\u003Cspan style=\"color: #a855f7\">🛡️ 0 次数据泄露\u003C\u002Fspan>**\n\n\u003C\u002Fdiv>\n\n---\n\n## 免费、开源的 Cluely 克隆版\n\nNatively 最初是对 Cluely 界面的像素级复刻，随后不断进化。如果你用过 Cluely，你已经知道如何使用 Natively。相同的屏幕叠加层（overlay）、相同的工作流、相同的快捷键。不同的是：它免费、开源、本地运行、支持任意大语言模型（LLM），且从未泄露过任何用户数据。\n\n> 正在寻找 **免费的 Cluely 替代品**？一个 **开源的 Cluely 克隆版**？你找对了。\n\n---\n\n## 免费 AI 编程面试助手 — 在 LeetCode、HackerRank 与 CoderPad 上不可被检测\n\nNatively 可作为标准在线测评中的 **免费、不可检测的 AI 编程面试助手**。它会捕获你的屏幕，分析题目，并通过一个隐形的叠加层实时提供提示、解答和解释 —— 完全不会干扰你的编码环境。\n\n**已在以下平台验证不可被检测：**\n\n- LeetCode（包括 LeetCode 竞赛）\n- HackerRank\n- CoderPad\n- Codility\n- HackerEarth\n- Karat\n- 任何基于浏览器的编码环境\n\n**工作原理：**\n\n1. 使用快捷键一键截图题目\n2. Natively 通过 OCR 识别题目内容，并将其发送至你选择的 AI（GPT、Claude、Gemini 或本地 Ollama）\n3. 
回复显示在隐形叠加层中 —— 绝不会出现在屏幕共享画面中\n\n> ⚠️ **重要提示：** Natively 并非用于绕过 **Pearson VUE**、**ProctorU** 或 **Respondus Lockdown Browser** 等专用监考软件 —— 这些软件在操作系统层面运行，属于完全不同的类别。对于没有专用监考软件的标准在线编程测评，Natively 的隐身模式无法被检测到。\n\n## 选择面试 AI 前你应该知道的 3 件事\n\n1. **Cluely** 在 2025 年年中发生数据泄露事件，导致 83,000 名用户的个人信息、访谈文字记录和截图被曝光 —— Natively 将所有数据本地存储，仅包含有限的基础遥测（telemetry），且从未发生过任何数据泄露。\n2. **Final Round AI** 每月收费 149 美元，其任务栏图标会被监考软件识别 —— Natively 免费、开源，并拥有经过实战检验的不可检测隐身模式（stealth mode）。\n3. **LockedIn AI** 每月收费 55–70 美元，并强制用户使用其云端大语言模型（LLM），不提供本地运行选项 —— Natively 允许你使用任意模型（GPT、Claude、Gemini、Llama），或通过 Ollama 完全离线运行。\n\n---\n\n\u003Cdiv align=\"center\">\n\n### ⭐ 给本项目点个 Star —— 这很重要\n\n每一个 Star 都能让 Natively 在 GitHub 搜索结果中排名更高，帮助开发者和求职者发现这个免费、注重隐私的替代方案，而不是每月花 149 美元购买那些将你的数据存储在他人服务器上的工具。\n\n[![Star Natively](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fstars\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant?style=for-the-badge&color=gold&label=Star%20on%20GitHub)](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant)\n\n\u003C\u002Fdiv>\n\n---\n\n## 演示\n\n![Natively AI Assistant Demo - Real-time Interview Helper and Transcription](https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_d88ef33e7e0b.gif)\n\n此演示展示了 **一个完整的实时会议场景**：\n\n- 会议进行时的实时转录\n- 跨多个发言者的滚动上下文感知（rolling context awareness）\n- 对共享幻灯片的截图分析\n- 即时生成下一步该说的话\n- 后续问题与简洁回答\n- 所有功能均实时运行，无需录制或后期处理\n\n---\n\n## 完整对比：Natively vs Cluely vs Final Round AI vs LockedIn AI vs Interview Coder\n\n| 功能                     | Natively                   | Cluely               | Pluely     | LockedIn AI      | Final Round AI         |\n| :----------------------- | :------------------------- | :------------------- | :--------- | :--------------- | :--------------------- |\n| **价格**                 | ✅ 免费（BYOK）            | ⚠️ 20 美元\u002F月        | ✅ 免费    | ❌ 55–70 美元\u002F月 | ❌ 149 美元\u002F月         |\n| **开源**                 | ✅ AGPL-3.0                | ❌                   | ✅         
| ❌               | ❌                     |\n| **本地数据 \u002F 隐私保护**  | ✅ 是                      | ❌ 云端服务器        | ✅ 是      | ❌ 云端服务器    | ❌ 云端服务器          |\n| **支持任意 LLM（BYOK）** | ✅ 是                      | ❌ 厂商锁定          | ⚠️ 有限    | ❌ 厂商锁定      | ❌ 厂商锁定            |\n| **本地 AI（Ollama）**    | ✅ 是                      | ❌                   | ❌         | ❌               | ❌                     |\n| **实时性（\u003C500ms）**     | ✅ 是                      | ⚠️ 延迟 5–90 秒      | ✅ 是      | ✅ ~116ms        | ⚠️ 最慢                |\n| **双音频通道**           | ✅ 系统 + 麦克风           | ❌ 单一流            | ❌         | ❌               | ❌                     |\n| **本地 RAG 记忆**        | ✅ SQLite + sqlite-vec     | ❌                   | ❌         | ❌               | ❌                     |\n| **会议历史**             | ✅ 完整仪表盘              | ⚠️ 有限              | ❌         | ❌               | ⚠️ 有限                |\n| **截图 OCR**             | ✅ 是                      | ⚠️ 有限              | ❌         | ✅ 是            | ⚠️ 有限                |\n| **隐身模式**             | ✅ 不可检测                | ❌                   | ❌         | ❌               | ❌ 监考软件可见        |\n| **进程伪装**             | ✅ 终端、设置等            | ❌                   | ❌         | ❌               | ❌                     |\n| **简历与上下文支持**     | ✅ 专业级                  | ❌                   | ❌         | ✅ 是            | ✅ 是                  |\n| **数据泄露历史**         | ✅ 无                      | ❌ 83,000 用户泄露   | ✅ 无      | ✅ 无            | ✅ 无                  |\n\n> **图例说明**：✅ 完全支持 · ⚠️ 部分或有限支持 · ❌ 不支持\n\n---\n\n## 为什么 Natively 更胜一筹\n\n### 对比 Cluely —— 泄露了 83,000 名用户数据\n\nNatively 的界面设计有意保持熟悉感 —— 如果你用过 Cluely，几乎无需学习成本。\n\nCluely 在 2025 年年中的数据泄露事件中，暴露了 83,000 名用户的个人信息、完整面试文字记录和截图。你在面试中说的每一句话都被存储在其服务器上，随后遭到泄露。而他们竟为此每月收取 20 美元。\n\nNatively 没有后端、没有服务器，仅包含基础遥测（仅用于 GA4 安装追踪，不收集任何用户数据）。你的文字记录、API 密钥和截图永远不会离开你的设备。整个代码库以 AGPL-3.0 开源协议发布，可公开审计。零泄露、零数据收集 —— 对于一款监听你面试内容的工具来说，这是唯一可接受的标准。\n\n### 对比 LockedIn AI —— 每月 70 美元的云端锁定\n\nLockedIn AI 
是同类工具中最贵的，每月收费 55–70 美元。它强制你使用单一云端大语言模型（LLM），不提供本地推理选项。每一条文字记录和回复都会经过其服务器。\n\nNatively 支持所有主流模型（Gemini、GPT、Claude、Groq），只需自带密钥（BYOK），并通过 Ollama 提供 100% 离线模式。你只需为你实际使用的 API Token 付费，或者完全免费地在本地运行 Llama 3。无需订阅，也无厂商锁定。\n\n### 对比 Final Round AI —— 每月 149 美元且会被监考软件识别\n\nFinal Round AI 是最昂贵的选择，每月 149 美元，主要针对面试前准备和模拟面试，但在实时延迟方面却是同类中最慢的。关键问题是，其任务栏图标会被监考软件识别，在受监控的面试中极易被发现。\n\nNatively 利用基于 Rust 的原生音频捕获技术和零拷贝 ABI 传输（Zero-Copy ABI Transfers），实现端到端延迟低于 500ms。其不可检测的隐身模式可从 Dock 中隐藏、伪装进程名称，并在所有窗口间同步状态 —— 经过五个主要版本的实战检验和强化。\n\n### 对比 Pluely —— 轻量但功能有限\n\nPluely 是一个不错的轻量级替代品（约 10MB，基于 Tauri），并支持 Linux，而 Natively 目前尚未提供 Linux 支持。这一点值得肯定。\n\n但 Pluely 仅是一个基础覆盖层（overlay）。它没有本地 RAG、没有会议历史、不支持双音频通道，也没有仪表盘。Natively 是一套完整的智能系统：通过本地向量搜索记住你过去的会议，分离系统音频与麦克风输入，并提供完整的管理仪表盘，支持导出为 Markdown、JSON 和纯文本格式。\n\n### vs Interview Coder — 更强大，完全免费\n\nInterview Coder 是一款专注于编程面试辅助的付费工具。而 Natively 不仅原生支持 Interview Coder 的所有功能，还提供更多能力，并且完全免费：\n\n|                                    |    Natively    | Interview Coder |\n| :--------------------------------- | :------------: | :-------------: |\n| **价格**                           | ✅ 免费（BYOK）|     ❌ 付费      |\n| **开源**                           |       ✅       |       ❌        |\n| **支持 LeetCode \u002F HackerRank**     |       ✅       |       ✅        |\n| **截图 + OCR 分析**                |       ✅       |       ✅        |\n| **实时叠加层（overlay）**          |       ✅       |       ✅        |\n| **本地 AI \u002F 离线模式**             |   ✅ Ollama    |       ❌        |\n| **行为面试支持**                   |       ✅       |       ❌        |\n| **系统设计支持**                   |       ✅       |       ❌        |\n| **会议历史 & RAG（检索增强生成）** |       ✅       |       ❌        |\n| **任意大语言模型（BYOK）**         |       ✅       |    ❌ 锁定      |\n| **数据本地存储**                   |       ✅       |    ❌ 云端      |\n\nNatively 覆盖完整的面试流程——不仅限于编程环节。\n\n### vs Parakeet AI — 带记忆与历史 vs 无状态叠加层\n\nParakeet AI 提供基础的实时会议辅助，但没有持久化记忆、会议历史记录，也不支持本地向量搜索。Natively 通过本地 RAG 
记住你过去的会议内容，允许你跨所有历史记录提问，并提供完整的仪表盘来管理、导出和搜索所有内容。\n\n---\n\n### 我们尚未完善之处\n\n- **暂不支持 Linux** —— 我们正在积极寻找维护者，帮助将 Natively 移植到 Linux 平台  \n- **API 密钥设置开销** —— 你需要自行提供 API 密钥（或安装 Ollama），相比一体化的云端工具，初始设置略显繁琐  \n- **无内置模拟面试模式** —— Final Round AI 提供专门的模拟面试练习；Natively 则专注于实时辅助  \n\n---\n\n## 为什么选择 Natively？\n\n当其他工具只是简单的 API 封装时，Natively 是一个完整的、原生构建的智能系统，专为高风险会议和面试场景设计。\n\n- **原生音频捕获（\u003C500ms）**：基于 Rust 构建，采用零拷贝 ABI 传输，绕过通用 Web 音频限制，实现超低延迟。  \n- **双通道智能处理**：分别为系统音频（对方所说内容）和麦克风输入（你口述内容）建立独立处理管道，确保精准转录，不受环境噪音干扰。  \n- **久经考验的隐身模式**：完全无法被检测。隐藏于 Dock 栏、禁用弹窗，并在屏幕共享时伪装进程。  \n- **滚动上下文（Rolling Context）**：我们不仅做转录，还会维护对话的“记忆窗口”，以提供更智能的回答。  \n- **本地 RAG 记忆**：使用 SQLite 向量搜索在本地嵌入你的会议记录，让你可以提问：“上周 John 对 API 说了什么？”  \n- **丰富仪表盘**：提供完整 UI 界面，用于管理、搜索和导出历史记录——不只是一个浮动窗口。  \n- **完全离线可用**：不信任云端？你可以使用本地 Ollama 模型 100% 离线运行 Natively，并仅启用有限的基础遥测功能。  \n\n---\n\n\u003Cdiv align=\"center\">\n\n[![Portfolio](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FPortfolio-evinjohn.vercel.app-blueviolet?style=flat-square&logo=vercel&logoColor=white)](https:\u002F\u002Fevinjohn.vercel.app\u002F)\n[![LinkedIn](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FLinkedIn-Connect-0077B5?style=flat-square&logo=linkedin&logoColor=white)](https:\u002F\u002Fwww.linkedin.com\u002Fin\u002Fevinjohn\u002F)\n[![X](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FX-@evinjohnn-black?style=flat-square&logo=x&logoColor=white)](https:\u002F\u002Fx.com\u002Fevinjohnn)\n[![Hire Me](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FHire_Me-Contact-success?style=flat-square&logo=gmail&logoColor=white)](mailto:evinjohnn@gmail.com?subject=Natively%20-%20Hiring%20Inquiry)\n[![Buy Me A Coffee](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FBuy%20Me%20A%20Coffee-Support-FFDD00?style=flat-square&logo=buy-me-a-coffee&logoColor=black)](https:\u002F\u002Fwww.buymeacoffee.com\u002Fevinjohn)\n\n\u003C\u002Fdiv>\n\n---\n\n## Natively Pro\n\n虽然 Natively **永远免费且开源**，我们也同时提供专为高级用户和求职者设计的 **Pro 版本**。购买 Pro 许可证不仅能让你在求职市场中占据优势，还能直接支持 Natively 
开源核心的持续开发！\n\n### 免费版 vs Pro 版功能对比\n\n| 功能                                               | Natively 免费版 | Natively Pro 版 |\n| :-------------------------------------------------- | :-----------: | :----------: |\n| **自带密钥模型（BYOK Models）**                     |      ✅       |      ✅      |\n| **本地 AI 支持（Ollama）**                          |      ✅       |      ✅      |\n| **实时语音转文字（\u003C500ms）**                        |      ✅       |      ✅      |\n| **实时上下文助手**                                  |      ✅       |      ✅      |\n| **截图与幻灯片 OCR 分析**                           |      ✅       |      ✅      |\n| **不可检测 & 隐身模式**                             |      ✅       |      ✅      |\n| **会议仪表盘 & 离线 RAG 历史**                      |      ✅       |      ✅      |\n| **职位描述（JD）与简历上下文感知**                  |      ❌       |      ✅      |\n| **自动公司调研与资料包生成**                        |      ❌       |      ✅      |\n| **实时薪资与 Offer 谈判副驾驶**                     |      ❌       |      ✅      |\n| **优先功能访问与技术支持**                          |      ❌       |      ✅      |\n\n\u003Cp align=\"center\">\n  \u003Ca href=\"https:\u002F\u002Fevynignatious.gumroad.com\u002Fl\u002Fnatively\">\n    \u003Cimg src=\"https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FGet_Natively_Pro-Unlock_Premium-FF3366?style=for-the-badge&logo=gumroad&logoColor=white\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n### v2.0.8 新增功能\n\nv2.0.8 版本在**隐形路由（stealth routing）**、**鼠标穿透模式（Mouse Passthrough Mode）**、**多模态 Groq 支持**等方面实现了重大改进，并修复了关键性 Bug，确保工作流无缝顺畅。\n\n- **多模态 Groq 支持**：将 `meta-llama\u002Fllama-4-scout-17b-16e-instruct` 集成到生态系统中，实现高速截图分析能力，并将最大生成 token 数提升至 8192。\n- **鼠标穿透模式**：将后端 Electron 的鼠标事件管理与全局快捷键管理器和 React 渲染界面之间的状态完全同步。\n- **即时隐形启动 & 窗口不透明度保护罩**：重构应用逻辑，使其在启动时立即应用缓存的 `isUndetectable` 状态，并引入不透明度保护罩，彻底消除显示受保护 UI 元素时出现的单帧屏幕闪烁问题。\n- **模型阵容与轮换引擎**：默认架构模型已更新为最新一代的 `gpt-5.4-chat`、`gemini-3.1` 和 `claude-sonnet-4-6`，同时强化了三层故障转移机制。\n- **永久隐藏与可见性缺陷**：修复了一个关键的 IPC 路由 Bug：动态隐藏会话 UI 时，`Cmd+B` 取消隐藏命令被错误地路由到后台 
Launcher，导致用户陷入“界面不可见”的陷阱。\n- **SQLite-Vec 数据损坏问题**：修复了一个因严格维度约束（`float[1536]`）导致的静默数据损坏 Bug，确保 100% 生成的嵌入（embeddings）都能被保留并可检索。\n- **Groq 多媒体丢弃问题**：解决了“前门（Front Door）”路由 Bug：图像附件完全绕过了 Groq 引擎，触发了虚假的 LLM 连接错误。\n- **关键竞态条件与线程安全**：消除了 LLM 故障转移循环期间危险的全局状态突变，并修复了 `SettingsManager` 早期初始化导致的致命崩溃。\n- **内存与资源泄漏**：强化了原生音频监听器的销毁逻辑，防止僵尸回调；同时清理了不透明度保护罩和伪装计时器中悬空的浮动超时 ID。\n- **原生模块加载器管道（跨平台稳定性）**：集成了自定义 NAPI-RS 绝对二进制加载器（`nativeModuleLoader.ts`），完全绕过 Windows Git Bash 上因 POSIX 符号链接导致的 `require` 失败问题，并进行了多项增强，例如修正硬编码的 `natively.icns` 依赖。\n\n---\n\n## 目录\n\n- [免费的 Cluely 克隆版](#the-free-open-source-cluely-clone)\n- [选择面试 AI 前需知的 3 件事](#3-things-you-should-know-before-choosing-an-interview-ai)\n- [演示](#demo)\n- [完整对比](#full-comparison-natively-vs-cluely-vs-final-round-ai-vs-lockedin-ai-vs-interview-coder)\n- [为何 Natively 胜出](#why-natively-wins)\n- [为何选择 Natively？](#why-natively)\n- [Natively Pro](#natively-pro)\n- [v2.0.8 新增功能](#whats-new-in-v208)\n- [隐私与安全](#privacy--security-core-design-principle)\n- [安装指南（开发者与贡献者）](#installation-developers--contributors)\n- [AI 提供商](#ai-providers)\n- [核心功能](#key-features)\n- [会议智能仪表盘](#meeting-intelligence-dashboard)\n- [路线图](#roadmap)\n- [使用场景](#use-cases)\n- [技术细节](#technical-details)\n- [已知限制](#known-limitations)\n- [负责任的使用](#responsible-use)\n- [贡献指南](#contributing)\n- [许可证](#license)\n- [常见问题](#faq)\n- [Natively 替代的竞品](#alternatives-natively-replaces)\n- [Star 历史](#star-history)\n\n---\n\n## Natively 是什么？\n\n**Natively** 是一款面向**实时场景**的**桌面 AI 助手**：\n\n- 会议\n- 面试\n- 演示\n- 课堂\n- 专业对话\n\n它提供：\n\n- 实时回答\n- 持续对话上下文\n- 截图与文档理解\n- 实时语音转文字（speech-to-text）\n- 即时建议下一句话该说什么\n\n全程保持**隐形、快速、以隐私为先**。\n\n---\n\n## 隐私与安全（核心设计原则）\n\n- 100% 开源（AGPL-3.0 许可证）\n- 自带密钥（Bring Your Own Keys, BYOK）\n- 本地 AI 选项（Ollama）\n- 所有数据本地存储\n- 极简遥测（仅基础 GA4 计数）\n- 不追踪用户数据\n- 无隐藏上传\n\n您明确控制：\n\n- 哪些功能在本地运行\n- 哪些功能使用云端 AI\n- 启用哪些提供商\n\n---\n\n## 安装指南（开发者与贡献者）\n\n> [!NOTE]\n> **macOS 用户（支持 Apple Silicon 与 Intel Mac）：**\n>\n> 1.  
**“来自未识别开发者”**：若出现此提示，请右键点击应用 > 选择 **打开** > 点击 **打开**。\n> 2.  **“应用程序已损坏”**：若出现此提示，请根据您的下载方式在终端中运行以下命令：\n>\n>     **对于 .zip 下载：**\n>\n>     ```bash\n>     xattr -cr \u002FApplications\u002FNatively.app\n>     ```\n>\n>     **对于 .dmg 下载：**\n>     1. 打开终端并运行：\n>        ```bash\n>        xattr -cr ~\u002FDownloads\u002FNatively-2.0.2-arm64.dmg # 或您具体的文件名\n>        ```\n>     2. 安装 natively.dmg\n>     3. 打开终端并运行：`xattr -cr \u002FApplications\u002FNatively.app`\n\n### 先决条件\n\n- Node.js（推荐 v20+）\n- Git\n- Rust（用于原生音频捕获）\n\n### AI 凭据与语音提供商\n\n**Natively 完全免费，只需您自己的密钥即可使用。**  \n可连接**任意**语音提供商和**任意**大语言模型（LLM）。无订阅、无加价、无隐藏费用。所有密钥均本地存储。\n\n### 无限免费转录（Whisper、Google、Deepgram）\n\n- **Soniox**（API Key） - _超快、高精度的流式语音转文字（STT）_\n- **Google Cloud Speech-to-Text**（服务账号）\n- **Groq**（API Key）\n- **OpenAI Whisper**（API Key）\n- **Deepgram**（API Key）\n- **ElevenLabs**（API Key）\n- **Azure 语音服务**（API Key + 区域）\n- **IBM Watson**（API Key + 区域）\n\n### AI 引擎支持（自带密钥，Bring Your Own Key）\n\n原生连接 **任意** 主流大模型或本地推理引擎。\n\n| 提供商（Provider）           | 最佳适用场景                                                |\n| :--------------------------- | :---------------------------------------------------------- |\n| **Gemini 3.1 系列**          | 推荐：超大上下文窗口（200 万 tokens）且成本低廉。           |\n| **OpenAI (GPT-5.4 & o3)**    | 高阶推理能力。                                              |\n| **Anthropic (Claude 4.6)**   | 编程与复杂精细任务。                                        |\n| **Groq (Llama 3.3\u002FScout 4)** | 极致速度（近乎即时响应）与截图分析。                        |\n| **Ollama \u002F LocalAI**         | 100% 离线 & 隐私（无需 API 密钥）。                         |\n| **OpenAI 兼容接口**          | 连接 _任意_ 自定义端点（如 vLLM、LM Studio 等）。           |\n\n> **注意**：你只需配置 **一个** 语音识别（Speech-to-Text）提供商即可开始使用。我们推荐 **Google STT**、**Groq** 或 **Deepgram** 以获得最快的实时性能。\n\n---\n\n#### 使用 Google Speech-to-Text（可选）\n\n你的凭证：\n\n- 永远不会离开你的设备\n- 不会被记录、代理或远程存储\n- 仅由本地应用程序使用\n\n你需要准备：\n\n- Google Cloud 账号\n- 已启用计费功能\n- 已启用 Speech-to-Text API\n- 服务账号（Service Account）的 JSON 
密钥文件\n\n设置步骤概要：\n\n1. 创建或选择一个 Google Cloud 项目  \n2. 启用 Speech-to-Text API  \n3. 创建一个服务账号  \n4. 分配角色：`roles\u002Fspeech.client`  \n5. 生成并下载 JSON 密钥文件  \n6. 在应用设置中指定该 JSON 文件路径  \n\n---\n\n## 开发环境搭建\n\n### 克隆仓库\n\n```bash\ngit clone https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant.git\ncd natively-cluely-ai-assistant\n```\n\n### 安装依赖\n\n```bash\nnpm install\n```\n\n### 构建原生音频模块（Rust）\n\n```bash\nnpm run build:native\n```\n\n### 环境变量\n\n创建 `.env` 文件：\n\n```env\n# 云 AI\nGEMINI_API_KEY=your_key\nGROQ_API_KEY=your_key\nOPENAI_API_KEY=your_key\nCLAUDE_API_KEY=your_key\nGOOGLE_APPLICATION_CREDENTIALS=\u002Fabsolute\u002Fpath\u002Fto\u002Fservice-account.json\n\n# 语音提供商（可选 - 仅需一个）\nDEEPGRAM_API_KEY=your_key\nELEVENLABS_API_KEY=your_key\nAZURE_SPEECH_KEY=your_key\nAZURE_SPEECH_REGION=eastus\nIBM_WATSON_API_KEY=your_key\nIBM_WATSON_REGION=us-south\n\n# 本地 AI（Ollama）\nUSE_OLLAMA=true\nOLLAMA_MODEL=llama3.2\nOLLAMA_URL=http:\u002F\u002Flocalhost:11434\n\n# 默认模型配置\nDEFAULT_MODEL=gemini-3.1-flash-lite-preview\n```\n\n### 运行（开发模式）\n\n```bash\nnpm start\n```\n\n### 构建（生产模式）\n\n```bash\nnpm run dist\n```\n\n此命令依次执行：Vite 构建 → TypeScript 编译 → 原生模块构建 → electron-builder 打包\n\n---\n\n### AI 提供商支持\n\n- **自定义端点（BYO Endpoint）**：粘贴任意 cURL 命令即可使用 OpenRouter、DeepSeek 或私有端点。\n- **Ollama（本地）**：零配置自动检测本地模型（如 Llama 3、Mistral、Gemma）。\n- **动态模型选择**：首选模型（OpenAI、Anthropic、Google）现在会在整个应用中自动显示。\n- **Google Gemini**：对 Gemini 3.1 系列提供一流支持。\n- **OpenAI**：支持 GPT-5.4 和 o3 系列，并优化了系统提示（system prompts）。\n- **Anthropic**：支持 Claude 4.6 系列，并修正了 max_tokens 参数。\n- **Groq**：使用 Llama 3.3 实现超快文本推理，并通过 Llama 4 Scout 实现截图分析。\n\n---\n\n## 核心功能\n\n### 隐形桌面助手\n\n- 始终置顶的半透明覆盖层\n- 快捷键即时隐藏\u002F显示\n- 支持所有应用程序\n\n### 实时面试副驾 & 编程辅助\n\n- 实时语音转文字（**延迟 \u003C500ms**）\n- **快速响应模式**：使用 Groq Llama 3.3 实现超快文本回复\n- **多语言支持**：可选择多种回复语言，并为语音识别匹配特定口音和方言\n- **反聊天机器人 \u002F 拟人化系统**：通过精炼的系统提示和负面约束，确保回复简洁、自然，与真实候选人无异（无机械式开场白或说教）\n- 基于上下文的记忆（RAG）用于过往会议\n- 在问题提出时即时回答\n- 
**临时\u002F最终转录桥接**：在录音过程中手动完成转录并桥接临时结果，提高准确性\n- 智能回顾与摘要\n\n### 即时屏幕 & 幻灯片分析（OCR）—— AI 编程面试助手\n\n- 支持 **LeetCode、HackerRank、CoderPad、Codility、HackerEarth** 及任何基于浏览器的编程环境\n- 一键快捷键捕获编程题目 —— 立即获得完整解答、解释及复杂度分析\n- 隐形覆盖层不会出现在屏幕共享或录制画面中\n- 支持多张截图处理多部分题目\n- 若主视觉模型失败，智能回退至 Groq Llama 4 Scout\n\n### 高级个人资料智能\n\n- **职位描述 & 简历上下文**：原生理解你的背景和应聘职位，提供高度定制化、上下文感知的回答\n- **公司调研**：即时获取你面试公司的深度情报与档案\n- **谈判辅助**：在薪资与录用谈判中提供实时策略指导\n\n### 上下文操作\n\n- 我该怎么回答？\n- 缩短回复\n- 回顾对话\n- 建议后续问题\n- 手动或语音触发提示\n\n### 双通道音频智能\n\n原生理解“听会议”和“与 AI 对话”是两项不同任务，并分别处理：\n\n- **系统音频（会议声音）**：直接从操作系统捕获高保真音频（macOS 和 Windows 均完全支持），能清晰“听到”同事发言，不受房间环境噪音干扰。\n- **采样率自动检测**：动态检测并同步真实硬件采样率（例如自动处理 48kHz 音频接口或外接麦克风，避免失真或降采样伪影）。\n- **两级静音处理**：结合自适应 RMS 阈值与 **WebRTC 机器学习语音活动检测（VAD）**，有效过滤打字声和风扇噪音。\n- **麦克风输入（你的声音）**：专用于你的语音指令和口述输入。可即时切换，向 Natively 提出私密问题而无需关闭会议软件的麦克风。\n\n### Spotlight 搜索与自定义\n\n- 全局激活快捷键（`Cmd+K` \u002F `Ctrl+K`）\n- **自定义按键绑定**：可自定义全局快捷键，便于控制\n- 即时答案覆盖层\n- 即将开始的会议准备状态\n\n### 本地 RAG（检索增强生成）与长期记忆\n\n- **完全离线 RAG**：所有向量嵌入（vector embeddings）和检索均在本地完成（SQLite + `sqlite-vec`）。\n- **语义搜索**：创新的“智能范围（Smart Scope）”可自动判断你是在询问当前会议还是过去的某次会议。\n- **滑动窗口 RAG**：50 个 token 的语义重叠，防止在分块边界处丢失上下文。\n- **纪元摘要（Epoch Summarization）**：采用更智能的转录文本内存管理机制，替代硬性截断——不再丢失会议早期的上下文。\n- **全局知识**：可跨**所有**过往会议提问（例如：“上个月我们对 API 做了什么决定？”）。\n- **自动索引**：会议内容会在后台自动分块、嵌入并建立索引。\n\n### 高级隐私与隐蔽性\n\n- **不可检测模式（Undetectable Mode）**：一键从 Dock\u002F任务栏隐藏，并通过视觉锁定的选择器防止状态不一致。\n- **跨窗口状态同步**：在设置、启动器和叠加窗口之间实现实时状态同步。\n- **进程伪装（Masquerading）**：可立即将应用伪装成终端、系统设置、活动监视器或其他无害工具，在屏幕共享时彻底规避被发现。\n- **安全加固**：退出应用时会从内存中清除 API 密钥，且凭据管理器会在释放密钥前覆盖关键数据。\n- **API 速率限制**：采用令牌桶算法（突发\u002F补充机制），避免在免费套餐提供商处触发 429 错误。\n- **纯本地处理**：所有数据均保留在你的设备上。\n\n---\n\n## 会议智能仪表盘\n\n原生集成强大且以本地优先的会议管理系统，用于回顾、搜索和管理你的全部对话历史。\n\n![Dashboard Preview](https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_aea01f311441.png)\n\n- **会议存档**：访问每场过往会议的完整转录文本，支持按关键词或日期搜索。\n- **智能导出**：一键将转录文本和 AI 摘要导出为 **Markdown、JSON 或纯文本**——非常适合粘贴到 Notion、Obsidian 或 Slack 中。\n- 
**使用统计**：实时追踪你的 token 使用量和 API 成本，清晰掌握在 Gemini、OpenAI 或 Claude 上的具体花费。\n- **音频分离**：分别控制 **系统音频**（对方所说内容）和 **麦克风**（你口述的内容）。\n- **会话管理**：重命名、整理或删除过往会话，保持工作区整洁。\n\n---\n\n## 路线图\n\n```mermaid\ntimeline\n    title Natively 产品路线图\n    section 短期（1-3 个月）\n        系统设计 : 可视化 MVP\n        用户角色 : 基础系统（3-5 个角色）\n        Token : 集成概念验证\n    section 中期（3-6 个月）\n        用户角色 : 完整库\n        系统设计 : 高级图表类型\n        Token : 社区功能\n        平台 : 移动应用开发\n    section 长期（6+ 个月）\n        协作 : 协同功能\n        生态系统 : 插件生态\n        平台 : 多语言支持\n```\n\n\u003Cdiv align=\"center\">\n  \u003Cem>如需详细功能说明，请参阅完整的 \u003Ca href=\"ROADMAP.md\">ROADMAP.md\u003C\u002Fa>。\u003C\u002Fem>\n\u003C\u002Fdiv>\n\n---\n\n## 使用场景\n\n### 学术与学习\n\n- **实时辅助**：在课堂上即时获取复杂知识点的解释。\n- **翻译**：在国际课程中实现即时语言翻译。\n- **问题求解**：立即获得编程或数学问题的帮助。\n\n### 专业会议\n\n- **面试支持**：提供上下文感知的提示，助你应对技术问题。\n- **销售与客户通话**：实时澄清技术规格或之前讨论的要点。\n- **会议摘要**：自动提取行动项和核心决策。\n\n### 开发与技术工作\n\n- **代码洞察**：解释屏幕上你不熟悉的代码块或逻辑。\n- **调试**：针对日志或终端错误提供上下文感知的协助。\n- **架构设计**：提供系统设计和集成模式的指导。\n\n---\n\n## 架构概览\n\nNatively 在本地处理音频、屏幕上下文和用户输入，维护一个滚动上下文窗口，并仅将所需的提示数据发送至所选的 AI 提供商（本地或云端）。\n\n除非用户明确启用，否则不会存储或传输原始音频、截图或转录文本。\n\n---\n\n## 技术细节\n\n### 技术栈\n\n- **React, Vite, TypeScript, TailwindCSS**\n- **Electron**\n- **Rust**（通过 `napi::Buffer` 实现**零拷贝 ABI 传输**的原生音频处理——支持持续音频捕获，避免 V8 垃圾回收压力，相比典型 Electron 应用显著降低延迟和 CPU 占用）\n- **SQLite**（使用 `sqlite-vec` 的本地存储）\n\n### 支持的模型\n\n- **Gemini 3.1 系列**\n- **OpenAI**（GPT-5.4, o3 系列）\n- **Claude**（4.6 系列）\n- **Ollama**（Llama, Mistral, CodeLlama）\n- **Groq**（Llama 3.3 用于文本，Llama 4 Scout 用于 OCR）\n\n### 系统要求\n\n- **最低配置**：4GB RAM\n- **推荐配置**：8GB+ RAM\n- **最佳体验**：16GB+ RAM（用于本地 AI）\n\n---\n\n## 负责任的使用\n\nNatively 旨在用于：\n\n- 学习\n- 提升生产力\n- 辅助无障碍使用\n- 专业协助\n\n用户需自行遵守：\n\n- 工作场所政策\n- 学术规范\n- 当地法律法规\n\n本项目不鼓励任何滥用或欺骗行为。\n\n---\n\n## 已知限制\n\n- Linux 支持有限，正在积极招募维护者\n- 初始设置需用户自行提供 API 密钥或安装 Ollama\n- 无内置模拟面试模式（聚焦于实时、现场辅助）\n\n---\n\n## 贡献指南\n\n欢迎贡献！请参阅我们的 [CONTRIBUTING.md](CONTRIBUTING.md) 获取完整的入门指南。\n\n- Bug 修复\n- 功能改进\n- 文档撰写\n- UI\u002FUX 优化\n- 新的 AI 
集成\n\n高质量的 Pull Request 将被审核并合并。\n\n### 维护者\n\n| 维护者                                 | 职责          | 支持                                                                                                                                                                     |\n| ------------------------------------------ | ------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n| [@evinjohnn](https:\u002F\u002Fgithub.com\u002Fevinjohnn) | macOS 构建   | [![Buy Me a Coffee](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002F-buy_me_a_coffee-FFDD00?style=for-the-badge&logo=buy-me-a-coffee&logoColor=black)](https:\u002F\u002Fwww.buymeacoffee.com\u002Fevinjohnn) |\n| [@razllivan](https:\u002F\u002Fgithub.com\u002Frazllivan) | Windows 构建 | [![Buy Me a Coffee](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002F-buy_me_a_coffee-FFDD00?style=for-the-badge&logo=buy-me-a-coffee&logoColor=black)](https:\u002F\u002Fapp.lava.top\u002Frazllivan)         |\n\n## 许可证\n\n本项目依据 GNU Affero 通用公共许可证 v3.0（AGPL-3.0）授权。\n\n如果您通过网络运行或修改此软件，则必须在相同许可证下提供完整的源代码。\n\n本仓库包含该项目的开源核心部分。\n\n官方版本中提供的某些功能属于商业版 Premium Edition，不包含在此仓库中。\n\n> **注意：** 本项目接受赞助、广告或合作——非常适合 AI、生产力工具或开发者工具领域的公司。\n\n---\n\n**如果 Natively 帮助您在会议、面试或演讲中取得成功，请为本仓库点个 Star！**\n\n---\n\n## 常见问题（FAQ）\n\n#### Natively 真的是免费的吗？\n\n是的。Natively 是一个开源项目。您只需自带 API 密钥（如 Gemini、OpenAI、Anthropic 等）按实际使用付费，或者通过连接本地 Ollama 实例实现 **100% 免费** 使用。\n\n#### Natively 是否支持 Zoom、Teams 和 Google Meet？\n\n是的。Natively 使用基于 Rust 的系统音频捕获技术，可在任何桌面应用程序中通用工作，包括 Zoom、Microsoft Teams、Google Meet、Slack 和 Discord。\n\n#### 我的数据安全吗？\n\nNatively 遵循 **隐私优先设计（Privacy-by-Design）** 原则。所有转录文本、向量嵌入（本地 RAG）和密钥均存储在您的本地设备上。我们没有后端服务器，仅收集有限的基本遥测数据（不含用户数据）。\n\n#### 我能用它参加技术面试吗？\n\nNatively 是适用于任何专业场景的强大助手。但用户需自行遵守所在公司的政策及面试准则。\n\n#### 如何使用本地模型？\n\n只需安装 **Ollama**，运行一个模型（例如 `ollama run llama3`），Natively 将自动检测到它。在 AI 提供商设置中启用 “Ollama” 即可切换至离线模式。\n\n#### Natively 
与 Cluely 相比如何？\n\nCluely 是一款每月 20 美元的云端工具，所有数据都存储在其服务器上。2025 年年中，Cluely 发生了一起数据泄露事件，导致 83,000 名用户的个人信息、对话记录和截图被曝光。而 Natively 是免费且开源的，所有数据均本地存储。它支持任意大语言模型（LLM，Large Language Model），不仅限于单一供应商，并通过 Ollama 提供本地 AI 能力，还具备经过实战检验的隐身模式。由于 Natively 没有服务器，因此从未发生过数据泄露。\n\n#### 隐身模式真的无法被检测到吗？\n\n是的。Natively 会从程序坞（Dock）中隐藏自身，将进程名称伪装成无害的系统工具（如 Terminal、Activity Monitor、System Settings），并在所有窗口间同步状态。该功能已在五个主要版本中不断强化，并在 Zoom、Teams 和 Google Meet 中测试过屏幕共享检测。\n\n#### Natively 能在 LeetCode 和 HackerRank 上使用吗？\n\n可以。Natively 的截图 + OCR 功能可捕获任何可见的编程题目，并通过不可见的叠加层返回完整解答。它适用于 LeetCode、HackerRank、CoderPad、Codility、HackerEarth、Karat 以及任何基于浏览器的编程环境。\n\n#### 在编程面试中会被检测到吗？\n\n对于标准在线测评（如 LeetCode、HackerRank、CoderPad 等），Natively 不会被检测到——它以伪装的系统进程运行，且叠加层不会出现在屏幕录制或屏幕共享中。但它 **并非** 用于绕过 Pearson VUE、ProctorU 或 Respondus Lockdown Browser 等操作系统级的专业监考软件。\n\n#### Natively 是 Interview Coder 的免费替代品吗？\n\n是的。Natively 能完成 Interview Coder 所做的所有事情——截图 OCR、实时编程辅助、不可见叠加层——并额外提供行为面试支持、系统设计帮助、本地 RAG 记忆以及支持任意 LLM 的自带密钥（BYOK, Bring Your Own Key）功能。而且全部免费。\n\n---\n\n## Natively 可替代的工具\n\nNatively 是以下工具的免费开源替代方案：\n\n| 工具                | Natively 替代的功能                                                              |\n| :------------------ | :---------------------------------------------------------------------------------- |\n| **Cluely**          | 实时 AI 会议副驾 —— 无需每月 20 美元费用或数据泄露风险                             |\n| **Final Round AI**  | 实时 AI 面试副驾 —— 无需每月 149 美元费用或监考可见的任务栏图标                    |\n| **LockedIn AI**     | 实时面试助手 —— 无需云绑定或每月 70 美元                                          |\n| **Interview Coder** | AI 编程面试助手 —— 不仅限于编程环节，还包含完整的会议上下文                       |\n| **Parakeet AI**     | 实时会议助手 —— 支持本地 RAG 记忆和完整历史记录仪表盘                             |\n| **Metaview**        | 自动会议纪要 —— 开源且本地存储                                                    |\n| **Otter.ai**        | 转录与会议摘要 —— 无需云存储                                                      |\n| **Fireflies.ai**    | 会议录音与 AI 记录员 —— 完全本地存储                                       
       |\n| **Teal**            | 求职与面试助手 —— 完全本地化且免费                                                |\n\n---\n\n`ai-assistant` · `meeting-notes` · `interview-helper` · `cluely-alternative` · `cluely` · `lockhead-ai` · `lockheed-ai` · `parakeet-ai` · `interview-coder` · `final-round-ai` · `metaview-alternative` · `otter-ai-alternative` · `fireflies-alternative` · `local-ai` · `ollama` · `electron` · `privacy-first` · `open-source` · `real-time-transcription` · `interview-copilot` · `ai-meeting-assistant` · `moltbot`\n\n---\n\n## 支持 Natively\n\n围绕 **Natively** 的社区在 Pump.fun 上创建了一个代币以支持该项目。\n\n创作者奖励有助于覆盖 **AI\u002FAPI 费用** 和持续开发成本。\n\n\u003Cp align=\"center\">\n  \u003Ca href=\"https:\u002F\u002Fpump.fun\u002Fcoin\u002FB5opQ9euCVcJALeeCQbrFv5kePG8cCcoYqnXfx4Ppump\">\n    \u003Cimg src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_08fe49304ab5.png\" alt=\"Support Natively on Pump.fun\" width=\"520\" \u002F>\n  \u003C\u002Fa>\n\u003C\u002Fp>\n\n## Star History（Star 历史）\n\n\u003Ca href=\"https:\u002F\u002Fstar-history.com\u002F#evinjohnn\u002Fnatively-cluely-ai-assistant&Date\">\n \u003Cpicture>\n   \u003Csource media=\"(prefers-color-scheme: dark)\" srcset=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_faf4d432a1d3.png&theme=dark\" \u002F>\n   \u003Csource media=\"(prefers-color-scheme: light)\" srcset=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_faf4d432a1d3.png\" \u002F>\n   \u003Cimg alt=\"Star History Chart\" src=\"https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_readme_faf4d432a1d3.png\" \u002F>\n \u003C\u002Fpicture>\n\u003C\u002Fa>\n\n\u003C!-- SEO: 免费 AI 面试副驾驶 · cluely 克隆版 · cluely 替代方案 · 开源 cluely · cluely 免费版本 · cluely GitHub · interview coder 替代方案 · interview coder 免费版 · interview coder 开源版 · 免费 interview coder · LeetCode AI 助手 · HackerRank AI 辅助工具 · 
编程面试 AI · 无法被检测的编程面试 AI · LeetCode 专用 AI · AI 编程面试助手 · Final Round AI 替代方案 · LockedIn AI 替代方案 · Parakeet AI 替代方案 · 免费面试作弊工具 · AI 面试助手 · 实时面试 AI · 无法被检测的面试 AI · 2025 2026 年最佳 AI 面试工具 · 开源面试 AI · 隐蔽模式面试 AI · 面试作弊软件 · AI 面试副驾驶 · 免费面试 AI · cluely 开源克隆版 · LeetCode 作弊工具 · HackerRank 作弊工具 · 编程面试 AI 速查表 -->\n\n\u003Csub>\nfree-ai-interview-copilot · open-source-cluely · cluely-alternative · cluely-clone · interview-coder-alternative · final-round-ai-alternative · lockedin-ai-alternative · ai-interview-assistant · real-time-interview-ai · undetectable-interview-ai · stealth-mode · local-ai · ollama · byok（自带密钥） · rag（检索增强生成） · electron · rust · privacy-first · meeting-assistant · interview-helper · open-source-interview-ai\n\u003C\u002Fsub>","# Natively — 免费开源 AI 面试助手快速上手指南\n\n## 环境准备\n\n- **操作系统**：  \n  - macOS 12+（支持 Apple Silicon 和 Intel）  \n  - Windows 10\u002F11\n- **依赖项**：\n  - 若使用本地模型（如 Llama 3），需提前安装 [Ollama](https:\u002F\u002Follama.com\u002F)\n  - 若使用云端模型（如 GPT、Claude、Gemini），需准备对应平台的 API Key\n- **网络建议**：  \n  国内用户访问 GitHub 或模型 API 可能较慢，建议配置代理或使用镜像加速（如 FastGit）\n\n## 安装步骤\n\n1. 访问 [GitHub Releases 页面](https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Freleases\u002Flatest) 下载对应系统的安装包：\n   - macOS 用户下载 `.dmg` 文件\n   - Windows 用户下载 `.exe` 文件\n\n2. 安装应用：\n   - **macOS**：双击 `.dmg` 文件，将应用拖入 `Applications` 文件夹\n   - **Windows**：双击 `.exe` 文件，按提示完成安装\n\n> 💡 国内用户若下载缓慢，可尝试通过 [FastGit](https:\u002F\u002Fdoc.fastgit.org\u002Fzh-cn\u002F) 加速 GitHub 资源下载。\n\n## 基本使用\n\n1. **首次启动配置**：\n   - 打开 Natively 应用\n   - 在设置中选择 AI 模型提供商（如 OpenAI、Anthropic、Google Gemini 或 Ollama）\n   - 输入你的 API Key（若使用本地模型则无需填写）\n\n2. **在编程面试中使用**：\n   - 进入 LeetCode \u002F HackerRank 等在线编程界面\n   - 使用快捷键（默认 `Cmd+Shift+P` 或 `Ctrl+Shift+P`）截取题目区域\n   - Natively 自动 OCR 识别题目并发送给 AI\n   - 答案与提示将显示在**透明悬浮窗**中，不会出现在屏幕共享画面里\n\n3. 
**在会议中使用**：\n   - 启动会议（如 Zoom、Teams）\n   - Natively 自动开始实时转录、分析上下文并提供发言建议\n   - 所有数据仅保存在本地，不会上传至任何服务器\n\n> ✅ 提示：Natively 默认启用“隐身模式”，适用于无专用监考软件（如 Pearson VUE）的标准在线笔试\u002F面试环境。","一位正在准备Meta后端工程师岗位终面的应届生，需要在45分钟内完成一道系统设计题和一道LeetCode Hard编码题，同时还要应对面试官的实时追问。\n\n### 没有 natively-cluely-ai-assistant 时\n- 面试中遇到不熟悉的分布式缓存设计问题，只能凭记忆硬答，逻辑混乱且遗漏关键点。\n- 编码环节卡在边界条件处理上，没有即时辅助，浪费大量时间调试却无法通过全部测试用例。\n- 使用在线AI工具担心被录屏或网络请求暴露，不敢启用任何外部帮助，全程高度紧张。\n- 依赖付费工具如Final Round AI，每月支出近百元，且需上传面试内容到第三方服务器，存在隐私泄露风险。\n- 面试结束后无法回溯具体问答细节，复盘效率低，难以针对性提升。\n\n### 使用 natively-cluely-ai-assistant 后\n- 开启“隐身模式”后，AI在本地实时分析面试问题，通过屏幕角落轻量提示系统设计的关键组件（如一致性哈希、LRU淘汰策略），回答更有条理。\n- 编码时自动调用本地RAG检索相似LeetCode题解，在不切换窗口的情况下提供精准代码片段建议，快速修复逻辑漏洞。\n- 所有数据仅在本机处理，自带BYOK（自带密钥）支持，彻底规避监考软件检测和数据外泄风险，心理负担大幅减轻。\n- 完全免费开源，无需订阅，本地运行不依赖云端服务，即使网络不稳定也能稳定辅助。\n- 面试结束后自动生成带时间戳的转录与AI交互日志，便于逐帧复盘技术盲点和表达问题。\n\nnatively-cluely-ai-assistant 让开发者在真实面试压力下获得安全、私密且零成本的智能协同，真正把注意力聚焦在解决问题本身。","https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fevinjohnn_natively-cluely-ai-assistant_aea01f31.png","evinjohnn","evin","https:\u002F\u002Foss.gittoolsai.com\u002Favatars\u002Fevinjohnn_8d955acc.jpg","😮‍💨",null,"https:\u002F\u002Fgithub.com\u002Fevinjohnn",[82,86,90,93,97,101],{"name":83,"color":84,"percentage":85},"TypeScript","#3178c6",92.7,{"name":87,"color":88,"percentage":89},"Rust","#dea584",2.5,{"name":91,"color":92,"percentage":23},"Python","#3572A5",{"name":94,"color":95,"percentage":96},"JavaScript","#f1e05a",1.8,{"name":98,"color":99,"percentage":100},"CSS","#663399",0.9,{"name":102,"color":103,"percentage":104},"HTML","#e34c26",0.1,877,196,"2026-04-05T17:33:13","AGPL-3.0","macOS, Windows","未说明",{"notes":112,"python":110,"dependencies":113},"支持 macOS 12+（Apple Silicon 和 Intel）以及 Windows 10\u002F11；可选集成 Ollama 实现本地离线 AI 推理；需用户自行提供大模型 API 密钥（如 GPT、Claude、Gemini 
等）；默认不收集用户数据，所有处理均在本地完成。",[],[15,13,14,26],[116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135],"ai","ai-assistant","claude-code","cluely-alternative","cluely-clone","interview","interviewcoder","openclaw","developer-tools","llm","meeting-assistant","open-source","moltbook","moltbot","trending","trending-repositories","cluely","cheating","final-round-ai","claude-skills","2026-03-27T02:49:30.150509","2026-04-06T07:14:52.815328",[139,144,149,154,159,164],{"id":140,"question_zh":141,"answer_zh":142,"source_url":143},161,"是否支持使用本地语音识别（如 FasterWhisper）或本地 AI 模型？","项目早期版本曾支持本地 STT（语音转文本），但由于性能不佳（即使在优化 Metal 加速的 MacBook 上表现也不理想），现已切换为基于云的 STT 服务（如 Deepgram）。Deepgram 提供免费额度（200 美元）和 WebSocket 流式传输。虽然目前未内置本地模型选项，但作为开源项目，社区建议未来可考虑提供自定义 STT 端点配置功能。","https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Fissues\u002F86",{"id":145,"question_zh":146,"answer_zh":147,"source_url":148},162,"使用 OpenAI 模型时提示“Unsupported parameter: 'max_tokens'”怎么办？","某些 OpenAI 模型（如新版 GPT）已弃用 max_tokens 参数，应改用 max_completion_tokens。临时解决方案是切换到 Gemini 或 Groq 等其他推理模型，开发者已在后续版本中修复此问题。","https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Fissues\u002F20",{"id":150,"question_zh":151,"answer_zh":152,"source_url":153},163,"Windows 上音频无法被检测或转录（尤其是使用 OpenAI Whisper 时）如何解决？","OpenAI Whisper 的 WebSocket 实现有 Bug，可能导致音频无法识别。建议暂时改用 Google Speech-to-Text：需创建 Google Cloud 项目、启用 Speech-to-Text API、创建服务账号并分配 roles\u002Fspeech.client 角色，然后下载 JSON 密钥文件并在 Natively 设置中指定该文件路径。ElevenLabs 和 Google STT 已验证可用。","https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Fissues\u002F84",{"id":155,"question_zh":156,"answer_zh":157,"source_url":158},164,"上传简历（PDF\u002FDOCX）时报错“All reasoning models failed for structured generation”怎么办？","该问题是已知 Bug，已在 v2.0.7 版本中修复。请升级到最新版本。若仍失败，可尝试将简历转换为纯文本（.txt）或 Markdown（.md）格式再上传，并确保使用的 AI 模型（如 
Claude）支持文档解析。","https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Fissues\u002F97",{"id":160,"question_zh":161,"answer_zh":162,"source_url":163},165,"Deepgram STT API 密钥连接时报 401 错误如何处理？","401 错误通常表示 API 密钥无效或权限不足。请确认：1）密钥正确无误；2）密钥具有访问 Deepgram STT API 的权限（检查密钥作用域）。重新生成一个有效密钥即可解决问题。","https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Fissues\u002F78",{"id":165,"question_zh":166,"answer_zh":167,"source_url":168},166,"在 Google Meet 全屏共享时，“不可检测模式”（Undetectable Mode）失效怎么办？","在开始屏幕共享后，再手动开启“不可检测模式”即可生效。部分用户反馈需在共享过程中切换该模式才能隐藏应用窗口。","https:\u002F\u002Fgithub.com\u002Fevinjohnn\u002Fnatively-cluely-ai-assistant\u002Fissues\u002F49",[170,175,180,184,188,193,198,203,208,213,218,223,228,233,238,243,248,253,258,263],{"id":171,"version":172,"summary_zh":173,"released_at":174},109538,"v2.2.0","## Summary\r\n\r\nMajor feature release introducing Live Negotiation Coaching, Code Hint mode, Brainstorm mode, an auto-expanding code panel, a full light mode overhaul, and critical stability & stealth improvements across the board.\r\n\r\n## What's New\r\n\r\n- **Live Negotiation Coaching** — Real-time salary negotiation advisor powered by a state machine that tracks offer phases, extracts amounts, and streams LLM-generated tactical guidance with a 5-second timeout and fallback copy\r\n- **Code Hint Mode (⌘6)** — Context-aware senior-reviewer hint streamed from screenshots; auto-extracts the problem statement on first press if no prior Solve was run; intelligently ages stale screenshot questions so multi-round interview sessions stay accurate\r\n- **Brainstorm Mode (⌘7)** — Streams 2–3 spoken, trade-off-aware approaches with brute-force → optimal pivot; integrates with the dynamic Recap\u002FBrainstorm action button configurable per session from the Settings popup\r\n- **4-Phase Interview Teleprompter** — Replaced the old solution view with a structured Understand → Brainstorm → Implement → Verify teleprompter with spoken 
scripts, colour-coded phase cards, and complexity pills\r\n- **Auto-Expanding Code Panel** — Overlay window smoothly expands to 1.3× width (780 px) when a code block is in view and contracts on scroll; driven by Framer Motion springs with zero re-renders per frame and a 2-call OS window sync strategy\r\n\r\n## Improvements\r\n\r\n- **Dynamic Recap\u002FBrainstorm button** — Cmd+4 action is now a user-configurable toggle between Recap and Brainstorm modes persisted to settings and broadcast across all windows\r\n- **Natively API Promo Toaster redesign** — Iridescent animated border (Aurora UI, 6 s loop), breathing glow, SVG grain texture, gradient headlines, and `useReducedMotion` gating for accessibility\r\n- **Full light mode system** — Research-backed Professional SaaS palette (#F5F5F5 canvas \u002F #FFFFFF cards \u002F gray-900 text \u002F blue-600 accent); all major components (Launcher, Settings, ModelSelector, JDAwareness, Premium, Profile, and Remote toasters) updated with depth-correct semantic tokens\r\n- **Company Intel Beta pill** — Added Beta badge and disclaimer text to the dossier results section\r\n- **Profile Intelligence settings tab** — Reordered navigation so Profile Intelligence appears immediately after Natively API\r\n- **Verbose debug logging toggle** — Amber-accented terminal icon toggle in General settings; gates all audio pipeline diagnostic logs and STT segment sampling behind the flag\r\n- **Microphone device selection** — Native Rust resolver honors user-selected microphone with exact-match → case-insensitive fallback → hard error listing available devices; \"Default\" sentinel check is now case-insensitive\r\n\r\n## Fixes\r\n\r\n- **Focus steal eliminated** — `showInactive()` used throughout toggle, screenshot-restore, cropper, and model-selector paths; `setAlwaysOnTop` only called when value changes; dock show\u002Fhide manages presence without calling `focus()`\r\n- **RAG global search** — `embedding_provider` SQL filter was 
incorrectly excluding most meetings; fixed to ensure cross-session retrieval works correctly\r\n- **Silence timer bar freeze** — Changed object-identity check `m === msg` to `m.id === msg.id` in the silence timer callback; spread-reconstructed arrays always invalidated the old reference before the 5-second timer fired\r\n- **Negotiation tracker cross-session bleed** — JD replacement now correctly resets the negotiation tracker; `deleteDocumentsByType` only resets on RESUME delete, not JD delete\r\n- **Log rotation** — 10 MB cap with `.log.1` rollover prevents unbounded log growth on long-running sessions\r\n- **Concurrent audio test guard** — `_audioTestStarting` boolean prevents two concurrent calls from both passing the async permission gate\r\n- **Fast mode persistence** — Settings restoration moved to an unconditional startup path so the \"Fast Response Mode\" flag survives restarts\r\n- **Light\u002Fdark theme flash** — Theme cached in `localStorage` and applied synchronously before `ReactDOM.createRoot`; `body` background changed from hardcoded `#000000` to `transparent` to eliminate black flash in light mode\r\n- **Dark mode regressions** — Launcher hero section, Settings rows, About creator pill, MeetingDetails tabs, FeatureSpotlight, and SettingsPopup toggle track all restored to original dark values after light-mode passes\r\n- **Code hint auto-screenshot** — `generate-code-hint` and `generate-brainstorm` handlers now auto-take a fresh screenshot (`restoreFocus=false`) before reading the queue to prevent stale prior screenshots being sent to the AI\r\n\r\n## Technical\r\n\r\n- **CI smoke test** — New `build-smoke.yml` workflow builds renderer + electron main process and asserts `dist-electron\u002Felectron\u002Fmain.js` exists\r\n- **Native build hardening** — `build:native` added to `app:build` pipeline; macOS defaults to current-arch only (`NATIVELY_BUILD_ALL_MAC_ARCHES=1` for dual build); `verifyArtifacts()` throws on missing `.node` files 
(fail","2026-04-04T01:46:41",{"id":176,"version":177,"summary_zh":178,"released_at":179},109539,"2.1.0","## Summary\r\n\r\nNatively 2.1.0 resolves a critical issue with premium license activations and bumps the app version for a fresh release.\r\n\r\n## What's New\r\n\r\n- Version bumped to 2.1.0 for the latest stable release.\r\n\r\n## Improvements\r\n\r\n- Improved compatibility with Dodo Payments API endpoints. \r\n\r\n## Fixes\r\n\r\n- Fixed a critical issue where valid Dodo Payments license keys failed to activate (\"activation failed: status=201, err=unknown error\") due to the system improperly handling HTTP `201 Created` responses.\r\n- Ensures all new premium license activations complete the checkout and activation flow smoothly.\r\n\r\n## Technical\r\n\r\n- Rebuilt the Rust native module to correctly parse HTTP 2xx success statuses during activation checks.\r\n- App binary repackaged and resigned for cross-platform distribution.\r\n\r\n## ⚠️macOS Installation (Unsigned Build)\r\n\r\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\r\n\r\nIf you see \"App is damaged\":\r\n\r\n- **For .zip downloads:**\r\n  1. Move the app to your Applications folder.\r\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n- **For .dmg downloads:**\r\n  1. Open Terminal and run:\r\n     ```bash\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.1.0-arm64.dmg\r\n     # Or for Intel Macs:\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.1.0-x64.dmg\r\n     ```\r\n  2. Install the natively.dmg\r\n  3. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n## ⚠️Windows Installation (Unsigned Build)\r\n\r\nWhen running the installer on Windows, you might see a \"Windows protected your PC\" warning from Microsoft Defender SmartScreen saying it prevented an unrecognized app from starting. \r\n\r\nSince this is an unsigned build, this is expected. 
You can safely ignore it by clicking **More info** and then **Run anyway**.\r\n\r\n\\\\ refer to changes.md for detailed changes\r\n","2026-03-31T06:31:23",{"id":181,"version":182,"summary_zh":79,"released_at":183},109540,"v2.1.0-beta.2","2026-04-01T22:02:02",{"id":185,"version":186,"summary_zh":79,"released_at":187},109541,"v2.1.0-beta.1","2026-03-31T23:34:01",{"id":189,"version":190,"summary_zh":191,"released_at":192},109542,"v2.0.9","## Summary\r\n\r\nNatively 2.0.9 brings the major audio transcription fixes, native module stability improvements, stealth & shortcut hardening, and a wave of under-the-hood quality improvements from merged PRs.\r\n\r\n## What's New\r\n\r\n- **Dual-channel STT** — system audio and microphone can now connect to the Natively API simultaneously using per-channel session keys (`system` \u002F `mic`), eliminating `concurrent_session_blocked` errors\r\n- **Screen Recording permission flow** — proactive TCC dialog triggered via `desktopCapturer.getSources()` before the audio pipeline starts; clear error surfaced when permission is `denied` or `restricted` instead of silent black frames\r\n- **Dodo Payments integration** — webhook processing with HMAC-SHA256 verification, idempotent replay guard (24 h), `subscription.on_hold` \u002F `subscription.failed` handling, and correct plan detection via product-ID env vars\r\n\r\n## Improvements\r\n\r\n- **LLM resilience** — 60 s \u002F 3-retry wrapper for OpenAI and 90 s \u002F 3-retry wrapper for Claude; Natively API falls back to Gemini on failure; `streamChatWithGemini` chain now tries Natively first\r\n- **GoogleSTT proactive restart** — pre-emptive stream restart at 4 min 30 s avoids the 5-minute hard-limit gap; improved buffer flush safety\r\n- **REST STT upload size** — audio resampled to 16 kHz mono before upload, cutting file size ~6× and keeping files under Groq\u002FOpenAI 25 MB limit\r\n- **Deepgram reconnect cap** — max 10 reconnect attempts to prevent infinite retry loops during 
outages\r\n- **Windows z-order fix** — `setAlwaysOnTop` re-asserted after `showInactive()` in `switchToOverlay()` and `showOverlay()` to prevent DWM silently demoting the overlay layer\r\n- **macOS compositor hide delay** — increased from 50 ms to 150 ms so the window is fully removed before screenshot capture, preventing black frames on slower machines\r\n- **STT key pre-population** — settings panel now correctly restores saved STT provider keys across all providers (Groq, OpenAI, Deepgram, ElevenLabs, Azure, IBM, Soniox)\r\n- **Custom provider timeouts** — 30 s `AbortController` timeout added to `executeCustomProvider()` and `streamWithCustom()` to prevent indefinite hangs on unresponsive endpoints\r\n\r\n## Fixes\r\n\r\n- **Fixed the audio transcription issue** — root cause was Deepgram receiving raw PCM without `encoding=linear16` in the WebSocket URL, causing immediate `upstream_closed` with code 1000 and zero transcription output\r\n- **Fixed STT over-billing** — language\u002Fsample-rate reconnects produced short-lived sessions billed as 1 min each; added 30 s minimum threshold for sessions with no Deepgram speech, and changed `Math.ceil` → `Math.round` for fairer rounding; duplicate mic-channel billing that doubled every session is now removed\r\n- **Fixed native module crash** — `better-sqlite3` rebuilt with `electron-rebuild` for Electron's NMV 130, resolving `NODE_MODULE_VERSION 141 vs 130` crashes that broke all DB operations (meetings, RAG, KnowledgeOrchestrator)\r\n- **Fixed asar stub audio silence** — `app.asar.unpacked` path now checked first in `nativeModuleLoader`; added functional smoke-test (`getInputDevices()` must return an Array) to permanently catch this class of false-pass\r\n- **Fixed ad-hoc signing order** — `codesign --deep` on the `.app` runs first, then `.node` binaries are re-signed with entitlements, preventing `--deep` from stripping the screen-capture entitlement\r\n- **Fixed keybind registration failure surfacing** — when 
`globalShortcut.register()` returns `false`, a `keybinds:registration-failed` IPC event is now broadcast to the renderer so the UI can warn the user\r\n- **Fixed dark mode logo** — `force-black-icon` CSS class now correctly renders the logo white\u002Ffaded in dark mode; resolved specificity conflict where opacity was silently overridden\r\n- **Fixed theme flash on launch** — inline `\u003Cscript>` in `index.html` reads `localStorage` and applies `data-theme` synchronously before the JS bundle loads, eliminating the light\u002Fdark flicker on repeat launches\r\n- **Fixed Natively STT fallback** — users with a stored `sttProvider='natively'` but no key no longer crash on meeting start; gracefully falls back to GoogleSTT\r\n- **Fixed `concurrent_session_blocked` permanent lock** — removed it from the fatal-error list; server closes the socket and `scheduleReconnect()` retries correctly after the old session is cleaned up\r\n- **Integrated several PRs** — napi-rs v3 migration for native audio entrypoints, Windows header layout fix, platform-aware shortcut symbols in About section, and various TypeScript type-safety improvements across `electron.d.ts`, `preload.ts`, and `ipcHandlers.ts`\r\n\r\n## Technical\r\n\r\n- Migrated native audio entrypoints to napi-rs v3 `ThreadsafeFunction` API with err-first `(err, arg)` callback signatures\r\n- `nativelyapi\u002Fserver.js` comprehensive rewrite fixing 15 critical bugs (missing imports, `ReferenceError` constants, session key scoping, HMAC webhook verification, ElevenLabs message normalization, and more)\r\n- `scripts","2026-03-28T19:04:25",{"id":194,"version":195,"summary_zh":196,"released_at":197},109543,"v2.0.8","## Summary\r\n\r\nVersion 2.08 introduces major advancements in stealth routing, Mouse Passthrough Mode, Multimodal Groq support, and resolves critical bugs to ensure a seamless workflow.\r\n\r\n## What's New\r\n\r\n- **Multimodal Groq Support**: Integrated `meta-llama\u002Fllama-4-scout-17b-16e-instruct` into the 
ecosystem for high-speed screenshot analysis capability.\r\n- **Mouse Passthrough Mode**: Merged backend Electron mouse event management with full state-sync between the global keybind manager and the React renderer interface.\r\n- **Updated Baseline Model Roster**: Updated default architecture models to utilize the latest generation `gpt-5.4-chat`, `gemini-3.1`, and `claude-sonnet-4-6`.\r\n\r\n## Improvements\r\n\r\n- **Instant Stealth Boot**: Refactored the app initialization sequence to immediately read cached `isUndetectable` states on boot for an instant stealth startup.\r\n- **Windows Opacity Shield**: Implemented an opacity shield on protected windows to eliminate a 1-frame screen flash when showing protected UI elements.\r\n- **Expanded Token Limits**: Increased Groq max completion tokens to 8192 (the API maximum) to better support full code generation without hitting API constraints.\r\n- **Stronger Model Rotation Engine**: Fortified the 3-tier fallback mechanisms and auto-upgrade logic across Gemini, Claude, GPT, and Groq models.\r\n\r\n## Fixes\r\n\r\n- **Permanent Hide & Visibility Flaw**: Repaired a critical IPC routing bug where hiding the session UI dynamically misrouted `Cmd+B` unhide commands to the background Launcher, eliminating the \"invisible interface\" trap that often forced application restarts.\r\n- **SQLite-Vec Corruptions**: Fixed a critical silent data-corruption bug caused by tight strict dimensionality constraints (`float[1536]`), writing a multi-table database manager to ensure 100% of generated embeddings are retained and searchable.\r\n- **Groq Multimedia Drop**: Resolved a \"Front Door\" routing bug where image attachments bypassed the Groq engine completely and triggered false LLM connection errors.\r\n- **Critical Race Conditions**: Eliminated dangerous global state mutations during LLM fallback loops and patched `SettingsManager` early-initialization fatal crashes.\r\n- **Memory & Resource Leaks**: Hardened native audio 
listener destruction to prevent zombie callbacks, and cleared dangling floating timeout IDs on opacity shields and disguise timers.\r\n- **Cross-Platform Compatibility**: Rectified hardcoded `natively.icns` dependencies causing missing-icon errors on Windows\u002FLinux environments.\r\n\r\n## Technical\r\n\r\n- **Native Module Loader Pipeline**: Integrated a custom NAPI-RS absolute binary loader (`nativeModuleLoader.ts`) to entirely bypass POSIX-symlink `require` failures on Windows Git Bash.\r\n- **Safe Worker Instantiation**: Relocated electron-specific module imports into execution-time lazy requires, neutralizing cross-boundary crashes.\r\n- **PR Code Integrations**: Cleanly integrated complex changes from PR #64, PR #71, PR #113, PR #115.\r\n\r\n## ⚠️macOS Installation (Unsigned Build)\r\n\r\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\r\n\r\nIf you see \"App is damaged\":\r\n\r\n- **For .zip downloads:**\r\n  1. Move the app to your Applications folder.\r\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n- **For .dmg downloads:**\r\n  1. Open Terminal and run:\r\n     ```bash\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.8-arm64.dmg\r\n     # Or for Intel Macs:\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.8-x64.dmg\r\n     ```\r\n  2. Install the natively.dmg\r\n  3. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n## ⚠️Windows Installation (Unsigned Build)\r\n\r\nWhen running the installer on Windows, you might see a \"Windows protected your PC\" warning from Microsoft Defender SmartScreen saying it prevented an unrecognized app from starting. \r\n\r\nSince this is an unsigned build, this is expected. 
You can safely ignore it by clicking **More info** and then **Run anyway**.\r\n","2026-03-24T23:04:51",{"id":199,"version":200,"summary_zh":201,"released_at":202},109544,"v2.0.7","## Summary\r\n\r\nVersion 2.0.7 is a substantial update focusing on deep stability, memory management, critical fallback mechanics, and brand-new stealth and pass-through capabilities.\r\n\r\n## What's New\r\n\r\n- **Multimodal Groq Support:** Integrated `meta-llama\u002Fllama-4-scout-17b-16e-instruct` into the ecosystem to enable native, blazing-fast screenshot analysis capability.\r\n- **Mouse Passthrough Mode:** Added a system-level global hotkey to seamlessly toggle mouse pass-through, allowing direct click-through interactions with any background window beneath the assistant.\r\n\r\n## Improvements\r\n\r\n- **Upgraded Model Roster:** The baseline AI architecture now defaults to the leading-edge `gpt-5.4-chat`, `gemini-3.1`, and `claude-sonnet-4-6`.\r\n- **Model Rotation Engine:** Fortified the 3-tier fallback mechanisms and auto-upgrade logic across all providers (Gemini, Claude, GPT, Groq) to guarantee absolute response generation reliability.\r\n- **Groq Token Expansion:** Increased max completion tokens to 8192 (the API physical limit) to drastically improve full code generation output and prevent `BadRequestError` drops.\r\n- **Process Disguise Mapping:** Enhanced stealth logic on Windows by contextually mapping macOS disguise names (e.g., automatically matching \"Terminal\" to \"Command Prompt\").\r\n- **SEO & Documentation:** Deeply optimized the project `README.md` for search engines, injecting targeted keywords and detailed solution guides.\r\n\r\n## Fixes\r\n\r\n- **Permanent Hide Trap & Data Loss:** Repaired a critical IPC routing vulnerability where hiding the UI permanently trapped the app in the background. 
This previously frustrated users into Force-Quitting, which triggered data corruption and wiped out stored API keys.\r\n- **Vector Database Corruption:** Fixed a silent indexing failure in `sqlite-vec` where using mixed embeddings (like local Ollama 768-dim alongside OpenAI 1536-dim) would permanently drop data. We've provisioned strict, dimension-specific tables (`vec_chunks_768`, `1536`, `3072`).\r\n- **Punctured Fallbacks:** Corrected Gemini `generateWithVisionFallback` chains to properly execute tier downgrades instead of silently failing.\r\n- **Groq Multimedia Drops:** Fixed a frontend routing defect that was preventing Groq from handling image attachments.\r\n- **Opacity Flash Bug:** Implemented a Windows \"opacity shield\" to resolve a 1-frame visual screen flash when pulling up stealth\u002Fprotected windows.\r\n- **Passthrough & Shortcut Dead Zones:** Fixed bugs where the passthrough hotkey became temporarily unresponsive when the main app \"stole\" OS focus, and ensured the transparent overlay securely resets itself after ending a meeting.\r\n\r\n## Technical\r\n\r\n- **Native Audio Loader:** Bypassed restrictive POSIX-symlink resolutions by mapping absolute paths directly to `.node` binaries, successfully repairing persistent start-up crashes—especially on Windows Git Bash.\r\n- **Memory & Listener Leaks:** Eradicated massive runtime memory leaks by clearing unbounded array loops in the disguise engine, aggressively detaching Rust\u002FC++ libuv event emitters on audio stops, and cleanly destroying opacity timers upon window focus shifts.\r\n- **Boot Race Conditions:** Patched application boot order crashes involving `userData` lookups resolving before Electron was explicitly ready.\r\n- **Complete Thread-Safety:** Eliminated dangerous global variable mutations in fallback chains by migrating to strictly localized `modelIdOverride` parameters, securing API loops.\r\n- **Settings State Persistence:** Installed fail-safes into the `SettingsManager` parser 
so invalid or manually corrupted configurations default gracefully rather than immediately crashing the boot sequence.\r\n- **Dynamic OS Icon Binding:** Ensured `.icns`, `.ico`, and `.png` icons dynamically bind to their respective Darwin, win32, and Linux target platform payloads. \r\n\r\n## ⚠️macOS Installation (Unsigned Build)\r\n\r\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\r\n\r\nIf you see \"App is damaged\":\r\n\r\n- **For .zip downloads:**\r\n  1. Move the app to your Applications folder.\r\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n- **For .dmg downloads:**\r\n  1. Open Terminal and run:\r\n     ```bash\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.7-arm64.dmg\r\n     # Or for Intel Macs:\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.7-x64.dmg\r\n     ```\r\n  2. Install the natively.dmg\r\n  3. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n## ⚠️Windows Installation (Unsigned Build)\r\n\r\nWhen running the installer on Windows, you might see a \"Windows protected your PC\" warning from Microsoft Defender SmartScreen saying it prevented an unrecognized app from starting. \r\n\r\nSince this is an unsigned build, this is expected. 
You can safely ignore it by clicking **More info** and then **Run anyway**.\r\n","2026-03-23T13:00:00",{"id":204,"version":205,"summary_zh":206,"released_at":207},109545,"v2.0.6","## Summary\r\n\r\nVersion 2.0.6 introduces critical stealth mode enhancements, an upgraded model roster with massive token limit increases for Groq, and a thorough senior-level code audit fixing multiple race conditions, memory leaks, and silent tracking drops.\r\n\r\n## What's New\r\n\r\n- **Multimodal Groq Support**: Integrated `meta-llama\u002Fllama-4-scout-17b-16e-instruct` into the ecosystem for screenshot analysis capability.\r\n- **Model Roster Update**: Updated baseline architecture natively to default to `gpt-5.4-chat`, `gemini-3.1`, and `claude-sonnet-4-6`.\r\n- **Token Limits**: Increased Groq max completion tokens to 8192 (the API maximum for context windows) to better support full code generation while preventing `BadRequestError`.\r\n- **Model Rotation Engine**: Fortified the 3-tier fallback mechanisms and auto-upgrade logic for Gemini, Claude, GPT, and Groq models.\r\n- **OpenAI Streaming STT**: Implemented a brand new low-latency WebSocket integration via the OpenAI Realtime API. Uses a 3-tier priority rotation (`gpt-4o-transcribe` → `gpt-4o-mini-transcribe` → `whisper-1` REST) with server-side VAD, noise reduction, and uninterrupted audio buffering.\r\n\r\n## Improvements\r\n\r\n- **SEO & Documentation**: Optimized `README.md` for search engines with hidden targeted keywords.\r\n- **Code Quality**: Performed a senior-level code review across modified files to address potential race conditions, edge cases, and empty references.\r\n- **STT Providers Architecture**: Refactored Google, Deepgram, Soniox, and ElevenLabs streaming implementations. 
Specifically engineered the OpenAI module with custom ring-buffers, a 10s dark-drop timeout, a 5s zombie-session timeout, and 250ms audio chunk limiters to eradicate API rate-limits.\r\n- **Stealth Boot Refactor**: Centralized platform disguise and dock icon management into `AppState` for consistent stealth behavior across reboots.\r\n\r\n## Fixes\r\n\r\n- **Critical Race Condition**: Eliminated dangerous `this.geminiModel` global state mutations during API fallback loops.\r\n- **Silent Fallback Failure**: Rewired the `generateWithVisionFallback` chains for Gemini to correctly inject auto-discovered Tier models instead of defaulting back to generic UI settings.\r\n- **Groq Multimedia Drop**: Repaired the \"Front Door\" routing bug in `streamChat` where image attachments bypassed the Groq engine completely and threw a \"No LLM provider available\" error.\r\n- **App Boot Race Condition**: Wrapped `SettingsManager.getInstance()` constructor file-system access in `app.isReady()` checks to prevent early-import fatal crashes.\r\n- **Settings State Persistence**: Added validation to `SettingsManager`'s JSON parser so corrupted `settings.json` files default to `{}` safely.\r\n- **Opacity Shield Memory Leak**: Saved references to the 60ms `setTimeout` Windows flash-shield timers in Windows Helpers and properly cleared them.\r\n- **Settings\u002FModelSelector Crash Risk**: Appended `.catch()` blocks to the `loadURL()` directives in Windows Helpers to handle React dev-server drops gracefully.\r\n- **Disguise Timer Memory Leak**: Rewrote `main.ts`'s process title disguise implementation to immediately strip timer IDs when completed.\r\n- **Ollama Initialization Risk**: Wrapped the `OllamaBootstrap.bootstrap()` floating promise in a tracked class property.\r\n- **Windows Icon Pathing**: Rewrote the `icon` constructor option mapping to dynamically resolve `natively.icns` for `darwin`, `icon.ico` for `win32`, and `icon.png` for Linux.\r\n- **Cross-Platform Disguise**: Verified 
mapping `Terminal` to `Command Prompt` on Windows and isolating `CFBundleName` safely to macOS environments.\r\n- **SQLite-Vec Per-Dimension Table Fix (v8 Migration)**: Fixed a critical silent data-corruption bug by provisioning three per-dimension table pairs (`vec_chunks_768`, `1536`, `3072`) and updating the VectorStore write path and native search payloads.\r\n- **Permanent Hide & State Clear Trap**: Repaired a critical IPC routing flaw in `WindowHelper.this.getMainWindow()` where hiding the session UI dynamically misrouted all subsequent `Cmd+B` \u002F \"Toggle Visibility\" commands to the background Launcher. This invisible interface trap caused users to repeatedly Force Quit the application via macOS dock. The forceful ungraceful exits during background syncing rounds led to truncated (wiped out) JSON files, erasing STT API keys and Disguise Settings. Atomic writes (implemented prior) prevent corruption during sudden exits, and this IPC fix completely solves the actual interface disappearance bug.\r\n\r\n## Technical\r\n\r\n- **PR Integration**: Safely integrated changes from PR #64 (\"Build stealth-mode enhancements\") and PR #71, conducted code reviews, and ensured build compatibility without modifying git history.\r\n- **SettingsManager**: Created `SettingsManager` to securely persist boot-critical settings (`isUndetectable`).\r\n- **Initialization**: Refactored `initializeApp` sequence in `main.ts` to immediately read cached `isUndetectable` state on boot via `SettingsManager` for ins","2026-03-17T10:21:58",{"id":209,"version":210,"summary_zh":211,"released_at":212},109546,"v2.0.5","## Summary\r\n\r\nVersion 2.0.5 delivers major reliability fixes to Stealth Mode and Process Disguise.\r\n\r\n## Improvements\r\n\r\n- **Stealth Mode UI**: The Process Disguise selector is now visually disabled and locked while Undetectable mode is active, preventing accidental state mismatches.\r\n- **State Synchronization**: Greatly improved internal state synchronization 
across all application windows (Settings, Launcher, Overlay).\r\n\r\n## Fixes\r\n\r\n- **Infinite Feedback Loops**: Completely eliminated the bug where toggling Undetectable mode would sometimes cause the app to rapidly toggle itself on and off.\r\n- **Delayed Dock Reappearance**: Fixed a regression where the macOS dock icon would mysteriously reappear several seconds after entering stealth mode if a disguise had recently been changed.\r\n- **Initial State Loading**: Fixed an issue where the Settings UI would briefly show incorrect toggle states when first opened.\r\n- **macOS OS-level Events**: Hardened the app against macOS `activate` events (like clicking the app in Finder) accidentally breaking stealth mode.\r\n\r\n## Technical\r\n\r\n- Refactored IPC (Inter-Process Communication) listeners for `SettingsPopup` and `SettingsOverlay` to use a strict one-way (receive-only) data binding pattern.\r\n- Added strict management and cancellation of `forceUpdate` timeouts during stealth mode transitions.\r\n- Added explicit type safety for the new getters in `electron.d.ts`.\r\n\r\n## ⚠️macOS Installation (Unsigned Build)\r\n\r\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\r\n\r\nIf you see \"App is damaged\":\r\n\r\n- **For .zip downloads:**\r\n  1. Move the app to your Applications folder.\r\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n- **For .dmg downloads:**\r\n  1. Open Terminal and run:\r\n     ```bash\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.5-arm64.dmg\r\n     # Or for Intel Macs:\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.5-x64.dmg\r\n     ```\r\n  2. Install the natively.dmg\r\n  3. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n","2026-03-15T07:56:44",{"id":214,"version":215,"summary_zh":216,"released_at":217},109547,"v2.0.4","## Summary\n\nVersion 2.0.4 introduces a massive architectural overhaul to the native audio pipeline, guaranteeing production-ready stability, true zero-allocation data transfer, and instantaneous STT responsiveness with WebRTC ML-based VAD.\n\n## What's New\n\n- **Two-Stage Silence Processing**: Replaced basic RMS noise gating with a two-stage pipeline combining an adaptive RMS threshold and WebRTC Machine Learning VAD. Rejects typing, fan noise, and non-speech sounds before they bill STT APIs.\n- **Zero-Copy ABI Transfers**: Transitioned the `ThreadsafeFunction` bridging to direct `napi::Buffer` (Uint8Array) allocations, completely eliminating V8 garbage collection pressure during continuous capture.\n- **Sliding-Window RAG**: Implemented a 50-token semantic overlap in `SemanticChunker.ts` to prevent conversational context loss across chunk boundaries.\n\n## Improvements\n\n- **Latency & Responsiveness Tuning**: Stripped redundant TS debouncing, slashed `MIN_BUFFER_BYTES`, and reduced native hangover, achieving a ~300ms reduction in end-to-end transcription latency. short utterances (\"Yes\", \"Stop\") no longer sit trapped in the buffer.\n- Removed floating-point division truncation for superior downsampling from 44.1kHz external microphones.\n\n## Fixes\n\n- Fixed a critical bug where the native Rust monitor returned a hardcoded `16000Hz` while actually streaming 48kHz audio. 
Now syncs true hardware sample rates.\n- Resolved the \"Input missing\" silent crash bug on microphone restarts by properly recreating the CPAL stream.\n- Restored the 10s continuous speech backstop for REST APIs to prevent unbounded buffer growth.\n- Added missing `notifySpeechEnded()` properties and cleaned up dangerous type casts.\n\n## Technical\n\n- Audio processing transitioned entirely to strict ABI memory bridging (`napi::Buffer`)\n- Re-architected native silence_suppression state machine around WebRTC VAD inputs.\n\n## ⚠️macOS Installation (Unsigned Build)\n\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\n\nIf you see \"App is damaged\":\n\n- **For .zip downloads:**\n  1. Move the app to your Applications folder.\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n\n- **For .dmg downloads:**\n  1. Open Terminal and run:\n     ```bash\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.4-arm64.dmg\n     # Or for Intel Macs:\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.4.dmg\n     ```\n  2. Install the natively.dmg\n  3. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n","2026-03-14T13:01:11",{"id":219,"version":220,"summary_zh":221,"released_at":222},109548,"v2.0.3","## Summary\n\nDynamic AI Model Selection, Multimodal Fallback capability, Multiple Screenshot Support, and a robust new embedding architecture.\n\n## What's New\n\n- **Dynamic AI Model Selection:** Replaced static model lists with dynamic dropdowns. Your preferred models synced from providers (like OpenAI, Anthropic, Google) now automatically appear across the entire app.\n- **Multimodal Resilience:** Added a \"Smart Dynamic Fallback\" using Groq Llama 4 Scout. 
If default vision models fail or get rate-limited during screen analysis, Natively instantly reroutes the image to ensure uninterrupted performance.\n- **Multiple Screenshot Support:** The Natively Interface can now handle and process multiple attached screenshots simultaneously instead of just one.\n\n## Improvements\n\n- **Improved Settings UX:** API keys now auto-save after 5 seconds of inactivity, and selecting a preferred model immediately updates the app without requiring a reload.\n- **Better Embeddings:** Migrated from Gemini Embedding to a completely new and more robust embedding architecture.\n\n## Fixes\n\n- **Claude Fixes:** Resolved max_tokens and context limits issues specific to Anthropic Claude interactions.\n\n## Technical\n\n- **DRY Refactoring:** Centralized model configuration strings across the codebase to ensure easier future updates.\n\n## ⚠️macOS Installation (Unsigned Build)\n\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\n\nIf you see \"App is damaged\":\n\n- **For .zip downloads:**\n  1. Move the app to your Applications folder.\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n\n- **For .dmg downloads:**\n  1. Open Terminal and run:\n     ```bash\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.3-arm64.dmg\n     # Or for Intel Macs:\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.3-x64.dmg\n     ```\n  2. Install the natively.dmg\n  3. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n","2026-03-13T17:15:12",{"id":224,"version":225,"summary_zh":226,"released_at":227},109549,"v2.0.2","## Summary\r\nv2.0.2 focuses on fixing Windows system audio capture, improving RAG stability, and resolving critical Soniox STT configuration issues.\r\n\r\n## What's New\r\n- Fully functional system audio capture for Windows\r\n- Introduced system for manual transcript finalization and interim\u002Ffinal bridging during recordings\r\n\r\n## Improvements\r\n- Migrated to `app.getAppPath()` for reliable cross-platform resource discovery\r\n- Ensured `sqlite-vec` compatibility and fixed embedding queue management\r\n- Upgraded `@google\u002Fgenai` and optimized embedding dimensionality for lower latency\r\n\r\n## Fixes\r\n- Improved Soniox STT streaming reliability, manual flushing, and configuration persistence\r\n- Resolved application entry point and module resolution issues in production builds\r\n- Fixed transcript bridging for manual recording mode\r\n- Corrected stealth activation and window focus inconsistencies\r\n\r\n## Technical\r\n- Dependency updates for `@google\u002Fgenai`\r\n- Cleaned up native compiler warnings for Windows\r\n- Fixed module resolution for internal Electron paths\r\n\r\n## ⚠️macOS Installation (Unsigned Build)\r\n\r\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\r\n\r\nIf you see \"App is damaged\":\r\n- **For .zip downloads:**\r\n  1. Move the app to your Applications folder.\r\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n- **For .dmg downloads:**\r\n  1. Open Terminal and run: \r\n     ```bash\r\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.2-arm64.dmg\r\n     ```\r\n  2. Install the natively.dmg\r\n  3. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n","2026-03-11T21:45:47",{"id":229,"version":230,"summary_zh":231,"released_at":232},109550,"v2.0.1","## Summary\n\nThe Intelligence & Stability Update, bringing Live RAG, Soniox support, and a completely overhauled UI.\n\n## What's New\n\n- Live Meeting RAG Integration for instantly pulling facts from past meetings.\n- Soniox speech provider for high-accuracy, low-latency transcriptions.\n- Customizable AI response languages and speech languages.\n\n## Improvements\n\n- Radically redesigned Profile Engine UI with a mature Apple-like minimalist aesthetic and glassmorphism.\n- Introduced Apple Premium Dark aesthetic for code blocks to preserve critical whitespace.\n- Re-engineered Windows audio capture resolving WASAPI compatibility issues.\n- Improved Markdown readability and stability across chat output via strict syntax highlighting.\n\n## Fixes\n\n- Resolved critical SQLite embedding indexing constraint errors.\n- Fixed ad-dismissal cooldowns to properly track hidden states.\n- Handled React dependency conflicts related to async-storage.\n- Fixed Native module compilation and packaging for smoother delivery on Windows.\n\n## Technical\n\n- Merged 3 major PRs streamlining repository contributions.\n- Added correct Windows build icons.\n\n## ⚠️macOS Installation (Unsigned Build)\n\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\n\nIf you see \"App is damaged\":\n\n- **For .zip downloads:**\n  1. Move the app to your Applications folder.\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n\n- **For .dmg downloads:**\n  1. Open Terminal and run:\n     ```bash\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.1-arm64.dmg\n     ```\n  2. Install the natively.dmg\n  3. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n","2026-03-06T00:28:48",{"id":234,"version":235,"summary_zh":236,"released_at":237},109551,"v2.0","## Summary\nNatively 2.0 — Major update with multi-language support, new STT providers, Google Search integration, and premium features.\n\n## What's New\n- Multi-language support for STT and AI responses with language selection in settings\n- Soniox STT provider added as a new speech-to-text option\n- Google Custom Search integration for real-time research results\n- Premium upgrade system with feature locking\n- Enhanced Company Research Engine with JD context support\n- Improved Knowledge Orchestrator and AOT (Ahead-of-Time) processing pipeline\n\n## Improvements\n- Launcher light mode theme refinements\n- Better system audio device selection\n- Improved Windows audio capture and WASAPI compatibility\n\n## Fixes\n- Fixed hardcoded English language for STT — Whisper now auto-detects languages\n- Fixed OpenAI GPT 5.2 model compatibility (max_completion_tokens)\n- Resolved merge conflicts and restored lost code changes across multiple modules\n\n## Technical\n- Refactored knowledge pipeline and NativeAudio stubs\n- Updated type definitions for stored credentials\n- Dependency and build packaging fixes for production\n\n## ⚠️macOS Installation (Unsigned Build)\n\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\n\nIf you see \"App is damaged\":\n- **For .zip downloads:**\n  1. Move the app to your Applications folder.\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n\n- **For .dmg downloads:**\n  1. Open Terminal and run: \n     ```bash\n     xattr -cr ~\u002FDownloads\u002FNatively-2.0.0-arm64.dmg\n     ```\n  2. Install the natively.dmg\n  3. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`","2026-03-02T19:47:20",{"id":239,"version":240,"summary_zh":241,"released_at":242},109552,"v1.1.8","## Summary\nPatch update addressing OpenAI GPT 5.x compatibility and increasing token output limits for all providers.\n\n## What's New\n- Replaced deprecated `max_tokens` parameter with `max_completion_tokens` required by GPT 5.x models.\n- Increased max output tokens for OpenAI (GPT 5.2) and Claude (Sonnet 4.5) to 65,536.\n- Increased max output tokens for Groq (Llama 3.3 70B) to 32,768.\n\n## Improvements\n- Improved response length capabilities across all text-generation AI models.\n- Updated connection test model to use `gpt-5.2-chat-latest` instead of the deprecated `gpt-3.5-turbo`.\n\n## Fixes\n- Fixed 400 error when using OpenAI GPT 5.x models for text queries and toggle actions.\n\n## Technical\n- Replaced `max_tokens` with `max_completion_tokens` in `LLMHelper.ts` and `ipcHandlers.ts`.\n\n## ⚠️macOS Installation (Unsigned Build)\n\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\n\nIf you see \"App is damaged\":\n- **For .zip downloads:**\n  1. Move the app to your Applications folder.\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n\n- **For .dmg downloads:**\n  1. Open Terminal and run: \n     ```bash\n     xattr -cr ~\u002FDownloads\u002FNatively-1.1.8-arm64.dmg\n     ```\n  2. Install the natively.dmg\n  3. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`","2026-02-23T06:51:04",{"id":244,"version":245,"summary_zh":246,"released_at":247},109553,"v1.1.7","## Summary\nSecurity hardening, memory optimization, and stability improvements for a more robust and reliable experience.\n\n## What's New\n- API rate limiting to prevent 429 errors on free-tier plans (Gemini, Groq, OpenAI, Claude)\n- Cross-platform screenshot support (macOS, Linux, Windows)\n- Official website link added to the About section\n\n## Improvements\n- Smarter transcript memory management with epoch summarization instead of hard truncation — no more losing early meeting context\n- API keys are now scrubbed from memory on app quit to minimize exposure window\n- Credentials manager now overwrites key data before disposal for enhanced security\n- Helper process renaming for improved stealth in Activity Monitor\n\n## Fixes\n- Fixed V8\u002FElectron entitlements crash on Intel Macs by including entitlements.mac.plist during ad-hoc signing\n- Fixed process disguise not applying correctly when undetectable mode is toggled on\n- Fixed usage array capping with dedicated helper method to prevent unbounded growth\n\n## Technical\n- Added `RateLimiter` service (token bucket algorithm with configurable burst and refill rates)\n- Added `PRIVACY.md` and `SECURITY.md` policy documents\n- Refactored ad-hoc signing script with helper renaming and proper entitlements flow\n- Version bump to 1.1.7\n\n## ⚠️macOS Installation (Unsigned Build)\n\nDownload the correct architecture .zip or .dmg file for your device (Apple Silicon or Intel).\n\nIf you see \"App is damaged\":\n- **For .zip downloads:**\n  1. Move the app to your Applications folder.\n  2. Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\n\n- **For .dmg downloads:**\n  1. 
Open Terminal and run: \n     ```bash\n     xattr -cr ~\u002FDownloads\u002FNatively-1.1.7-arm64.dmg && \\\n     xattr -cr \u002FApplications\u002FNatively.app\n     ```\n","2026-02-20T05:21:29",{"id":249,"version":250,"summary_zh":251,"released_at":252},109554,"v1.1.6","## Summary\r\nThe \"Total Recall\" Update: Introducing Fast Response Mode, Local RAG & Long-Term Memory, custom key bindings, and enhanced stealth features for the ultimate AI assistant experience.\r\n\r\n## What's New\r\n- Fast Response Mode: Super-fast text processing powered by Groq Llama 3.\r\n- Local RAG & Memory: Offline semantic search and retrieval across all your past meetings.\r\n- Speech Providers: Added support for 7+ speech providers including Google, Groq, OpenAI, Deepgram, ElevenLabs, Azure, and IBM Watson.\r\n- Custom Key Bindings: Full control over global shortcuts to fit your workflow.\r\n- Stealth Mode 2.0: Enhanced disguise options (Terminal, Activity Monitor) and \"undetectable\" dock mode.\r\n- Markdown Support: Improved rendering in usage section for better readability of AI responses.\r\n- Image Processing: Faster image analysis powered by sharp optimizations.\r\n\r\n## Improvements\r\n- Performance: Optimized image handling and reduced latency for multimodal tasks.\r\n- UX Refinement: Verified and fixed focus stealing issues for a smoother experience.\r\n\r\n## Fixes\r\n- Fixed UI bugs related to window focus.\r\n- Resolved minor styling issues in the usage section.\r\n\r\n## Technical\r\n- Integrated sharp for server-side image processing.\r\n- Optimized SQLite vector retrieval for local RAG.\r\n- Unified speech provider interface for 7+ integrations.\r\n\r\n## ⚠️macOS Installation (Unsigned Build)\r\n\r\nDownload the correct architecture .zip file for your device (Apple Silicon or Intel) and extract it.\r\n\r\nIf you see \"App is damaged\":\r\n1. Move the app to your Applications folder.\r\n2. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n\r\n","2026-02-14T23:16:28",{"id":254,"version":255,"summary_zh":256,"released_at":257},109555,"v1.1.5","## Summary\r\nThe Stealth & Intelligence Update: Enhances stealth capabilities, expands AI provider support, and improves local AI integration.\r\n\r\n## What's New\r\n- Native Speech Provider Support: Added Deepgram, Groq, and OpenAI speech providers.\r\n- Custom LLM Providers: Connect to any OpenAI-compatible API including OpenRouter and DeepSeek.\r\n- Smart Local AI: Auto-detection of available Ollama models for local AI.\r\n- Global Spotlight Search: Toggle chat overlay with Cmd+K (macOS) and Ctrl+K (Windows\u002FLinux).\r\n- Masquerading Mode: Appear as system processes like Terminal or Activity Monitor.\r\n- Improved Stealth Mode: Enhanced activation and window focus transitions.\r\n\r\n## Improvements\r\n- Natural Responses: Updated system prompts for more concise and natural responses.\r\n- Conversational Logic: Reduced robotic preambles and unnecessary explanations.\r\n- Performance: Improved UI scaling and reduced speech-to-text latency.\r\n\r\n## Fixes\r\n- No critical fixes reported in this release.\r\n\r\n## Technical\r\n- Internal logic refinements for improved conversational flow.\r\n- Updater and background process stability improvements.\r\n\r\n## ⚠️ macOS Installation (Unsigned Build)\r\n If you see \"App is damaged\":\r\n1. Move the app to your Applications folder.\r\n2. 
Open Terminal and run: `xattr -cr \u002FApplications\u002FNatively.app`\r\n\r\n","2026-02-13T06:36:34",{"id":259,"version":260,"summary_zh":261,"released_at":262},109556,"v1.1.3","# v1.1.3: OpenAI (GPT-5.2) & Claude (Sonnet 4.5) Support\r\n\r\n## 🚀 Key Features\r\n- **OpenAI (GPT-5.2) & Claude (Sonnet 4.5) Integration:** Fully supported alongside Gemini and Groq.\r\n- **Smart Dynamic Fallback:** Automatically rotates to the next available provider if one fails (Gemini → OpenAI → Claude → Groq).\r\n- **Relentless Retry:** Retries the entire provider chain up to 3 full rotations to guarantee a response.\r\n- **Enhanced Quality:** System prompts now use provider-native API roles for better adherence and formatting.\r\n- **Improved Multimodal:** More stable image understanding across all vision models.\r\n\r\n## ⚡ Performance\r\n- Zero-latency token streaming (no buffering).\r\n- Optimized error handling.","2026-02-09T22:47:19",{"id":264,"version":265,"summary_zh":79,"released_at":266},109557,"v1.1.2","2026-02-08T15:52:50"]