# Similar Projects

**openclaw** (`openclaw/openclaw`) · ★ 349,277 · difficulty 3 · last commit 2026-04-06 · tags: Agent, Dev Framework, Image, Data Tools

OpenClaw is a local-first AI assistant built for individuals, designed to give you a fully controllable intelligent companion on your own devices. It breaks the constraint of traditional assistants that live inside one particular web page or app: it plugs directly into the messaging channels you already use, including WeChat, WhatsApp, Telegram, Discord, iMessage, and dozens of other platforms. Whichever chat app you message from, OpenClaw responds instantly, and it also supports voice interaction on macOS, iOS, and Android, plus a real-time rendered canvas you can manipulate. It mainly addresses the need for data privacy, fast responses, and an "always-on" experience: with the AI deployed locally, you enjoy quick, private assistance without relying on cloud services, truly putting you in charge of your own data. Its distinctive technical highlight is a powerful gateway architecture that separates the control plane from the core assistant, keeping cross-platform communication smooth and extensible. OpenClaw suits technically minded users and developers building personalized workflows, as well as privacy-conscious users who do not want to be locked into a single ecosystem. Basic terminal skills are enough (macOS, Linux, and Windows WSL2 are supported) to deploy it through a simple command-line bootstrap. If you long for an assistant that understands you…

**stable-diffusion-webui** (`AUTOMATIC1111/stable-diffusion-webui`) · ★ 162,132 · difficulty 3 · last commit 2026-04-05 · tags: Dev Framework, Image, Agent

stable-diffusion-webui is a web interface built on Gradio that lets users run the powerful Stable Diffusion image-generation model locally with ease. It removes the pain points of the original model, which depended on the command line, had a high barrier to entry, and scattered its features, by folding the whole AI drawing workflow into one intuitive graphical platform. Casual creators who want a quick start, designers who need fine control over image details, and developers or researchers probing the model's potential all benefit. Its core strength is an exceptionally rich feature set: besides the basic text-to-image, image-to-image, inpainting, and outpainting modes, it pioneered advanced features such as attention adjustment, prompt matrices, negative prompts, and "highres fix". It also bundles face-restoration tools such as GFPGAN and CodeFormer, supports several neural-network upscalers, and lets users extend it without limit through a plugin system. Even on VRAM-constrained hardware it offers matching optimization options, putting high-quality AI art within easy reach.

**everything-claude-code** (`affaan-m/everything-claude-code`) · ★ 160,411 · difficulty 2 · last commit 2026-04-18 · tags: Dev Framework, Agent, Language Model

everything-claude-code is a high-performance optimization system built for AI coding assistants such as Claude Code, Codex, and Cursor. It is not just a set of configuration files but a complete framework hardened through long real-world use, targeting the core pain points AI agents hit in actual development: low efficiency, lost memory, security risks, and no capacity for continuous learning. By introducing modular skills, intuition enhancement, persistent memory, and built-in security scanning, it markedly improves an AI's performance on complex tasks and helps developers build more stable, production-grade agents. Its distinctive "research-first" development philosophy and token-consumption optimizations make model responses faster and cheaper while defending against potential attack vectors. The toolkit suits software developers, AI researchers, and teams that want deeply customized AI workflows, whether building large codebases or enlisting AI for security audits and automated testing. An open-source project that won an Anthropic hackathon award, it combines multi-language support with a rich set of practical hooks, letting the AI truly grow into one that understands…

**ComfyUI** (`Comfy-Org/ComfyUI`) · ★ 109,154 · difficulty 2 · last commit 2026-04-18 · tags: Dev Framework, Image, Agent

ComfyUI is a powerful, highly modular visual AI engine built for designing and executing complex Stable Diffusion image-generation workflows. It abandons traditional code writing in favor of an intuitive node-based flowchart interface, letting users build personalized generation pipelines by connecting functional modules. This design neatly solves the complexity and inflexibility of configuring advanced AI-drawing workflows: even without a programming background, users can freely combine models, tune parameters, and preview results in real time, handling everything from basic text-to-image up to multi-stage high-resolution refinement. ComfyUI is exceptionally compatible: it runs on Windows, macOS, and Linux, adapts broadly to NVIDIA, AMD, Intel, and Apple Silicon hardware, and was among the first to support frontier models such as SDXL, Flux, and SD3. Researchers and developers digging into algorithmic potential, as well as designers and seasoned AI-art enthusiasts chasing maximum creative freedom, all get strong support. Its modular architecture lets the community keep adding capabilities, making it one of today's most flexible open-source diffusion-model tools with the richest ecosystem, efficiently turning ideas into reality.

**gemini-cli** (`google-gemini/gemini-cli`) · ★ 100,752 · difficulty 2 · last commit 2026-04-10 · tags: Plugin, Agent, Image, Dev Framework

gemini-cli is an open-source AI command-line tool from Google that integrates the powerful Gemini models directly into the user's terminal. For developers who live on the command line, it offers the shortest path from typing a prompt to getting a model response, with no window switching. It eliminates the frequent context switching of development work, letting users complete code understanding, generation, debugging, and automated operations tasks inside the familiar terminal, whether querying a large codebase, generating an application from a sketch, or carrying out complex Git operations, all through natural-language instructions. It is a good fit for software engineers, DevOps staff, and technical researchers. Core highlights include a context window of up to one million tokens with strong logical reasoning; built-in utilities for Google Search, file operations, and shell command execution; and, uniquely, support for MCP (Model Context Protocol), which lets users flexibly extend custom integrations and connect external capabilities such as image generation. A personal Google account also comes with a free usage quota, and the project is fully open source under Apache 2.0, making it an ideal assistant for terminal productivity.

**markitdown** (`microsoft/markitdown`) · ★ 93,400 · difficulty 2 · last commit 2026-04-06 · tags: Plugin, Dev Framework

MarkItDown is a lightweight Python tool from Microsoft's AutoGen team, designed to convert all kinds of files to Markdown efficiently. It parses PDF, Word, Excel, PowerPoint, images (with OCR), audio (with speech transcription), HTML, and even YouTube links, accurately extracting key structure such as headings, lists, tables, and links. As AI applications spread, large language models (LLMs) handle text well but struggle to read complex binary office documents directly. MarkItDown solves exactly this: it turns unstructured or semi-structured files into the token-efficient Markdown that models understand "natively", making it an ideal bridge between local files and AI analysis pipelines. It also provides an MCP (Model Context Protocol) server that integrates seamlessly with LLM applications such as Claude Desktop. The tool suits developers, data scientists, and AI researchers, especially those building retrieval-augmented generation (RAG) systems, running bulk text analysis, or letting an AI assistant "read" local files directly. The output is reasonably human-readable too, but its core advantage is for machines…

# hibayesian/awesome-automl-papers

> A curated list of automated machine learning papers, articles, tutorials, slides and projects

awesome-automl-papers is a carefully curated library of automated machine learning (AutoML) resources, gathering the field's latest academic papers, technical articles, tutorial slides, and open-source projects. It targets a pain point of applied machine learning: over-reliance on expert knowledge. In the traditional workflow, steps such as data preprocessing, feature selection, model construction, and hyperparameter optimization are complex and time-consuming, and usually need senior experts; AutoML aims to automate these steps so that non-experts can build models efficiently, sometimes even surpassing hand-tuned expert performance. The collection suits machine learning researchers, algorithm engineers, and developers who want to follow the AutoML frontier: it tracks core advances from automated data cleaning and automated feature engineering (AutoFE) to hyperparameter optimization (HPO) and neural architecture search (NAS), and it offers a side-by-side comparison of the AutoML capabilities of major vendors such as Google, Microsoft, and Alibaba, helping practitioners sort out the technical landscape. Whether you are a newcomer to the field or a seasoned scholar hunting for fresh research ideas, awesome-automl-papers is a valuable guide for tracking this booming trend.

# Awesome-AutoML-Papers

**Awesome-AutoML-Papers** is a curated list of *automated machine learning* papers, articles, tutorials, slides and projects. **Star** this repository, and you can keep abreast of the latest developments in this booming research field. Thanks to all the people who made contributions to this project. Join us, and you are welcome to become a contributor.

<div style="text-align: center">
<img src="https://oss.gittoolsai.com/images/hibayesian_awesome-automl-papers_readme_c4b1e8f7046c.png" alt="banner"/>
</div>

# What is AutoML?
*Automated Machine Learning* (AutoML) provides methods and processes to make machine learning available to non-experts, to improve the efficiency of machine learning, and to accelerate research on machine learning.

Machine Learning (ML) has achieved considerable successes in recent years, and an ever-growing number of disciplines rely on it.
However, this success crucially relies on human machine learning experts to perform the following tasks:
+ Preprocess the data,
+ Select appropriate features,
+ Select an appropriate model family,
+ Optimize model hyperparameters,
+ Postprocess machine learning models,
+ Critically analyze the results obtained.

As the complexity of these tasks is often beyond non-ML-experts, the rapid growth of machine learning applications has created a demand for off-the-shelf machine learning methods that can be used easily and without expert knowledge. We call the resulting research area, which targets the progressive automation of machine learning, *AutoML*. As a new sub-area of machine learning, *AutoML* has attracted attention not only within machine learning but also in computer vision, natural language processing and graph computing.

There is no formal definition of *AutoML*. Judging from the descriptions in most papers, the basic procedure of *AutoML* can be illustrated as follows.

<div style="text-align: center">
<img src="https://oss.gittoolsai.com/images/hibayesian_awesome-automl-papers_readme_46343347a781.jpg" width="600px" alt="figure1"/>
</div>

*AutoML* approaches are already mature enough to rival and sometimes even outperform human machine learning experts. Put simply, *AutoML* can lead to improved performance while saving substantial amounts of time and money, as machine learning experts are both hard to find and expensive. As a result, commercial interest in *AutoML* has grown dramatically in recent years, and several major tech companies and start-ups are now developing their own *AutoML* systems. The following table gives an overview comparison of some of them.

| Company       | AutoFE     | HPO        | NAS        |
| :-----------: | :--------: | :--------: | :--------: |
| 4paradigm     |     √      |      √     |      ×     |
| Alibaba       |     ×      |      √     |      ×     |
| Baidu         |     ×      |      ×     |      √     |
| Determined AI |     ×      |      √     |      √     |
| Google        |     √      |      √     |      √     |
| DataCanvas    |     √      |      √     |      √     |
| H2O.ai        |     √      |      √     |      ×     |
| Microsoft     |     ×      |      √     |      √     |
| MLJAR         |     √      |      √     |      √     |
| RapidMiner    |     √      |      √     |      ×     |
| Tencent       |     ×      |      √     |      ×     |

**Awesome-AutoML-Papers** includes very up-to-date overviews of the bread-and-butter techniques we need in *AutoML*:
+ Automated Data Cleaning (Auto Clean)
+ Automated Feature Engineering (Auto FE)
+ Hyperparameter Optimization (HPO)
+ Meta-Learning
+ Neural Architecture Search (NAS)

<div style="text-align: center">
<img src="https://oss.gittoolsai.com/images/hibayesian_awesome-automl-papers_readme_b955aa114624.png" alt="automl"/>
</div>
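To make the procedure above concrete, here is a minimal sketch, not taken from any listed paper, of automating two of the expert tasks, model-family selection and hyperparameter optimization, with a plain random search over scikit-learn pipelines. The dataset, search space, and budget are illustrative assumptions.

```python
# Illustrative sketch: random search over model families and hyperparameters.
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

X, y = load_breast_cancer(return_X_y=True)
rng = np.random.default_rng(0)

def sample_candidate():
    """Draw one candidate: a model family plus its hyperparameters."""
    if rng.random() < 0.5:
        return make_pipeline(
            StandardScaler(),
            LogisticRegression(C=10 ** rng.uniform(-3, 3), max_iter=2000))
    return make_pipeline(RandomForestClassifier(
        n_estimators=int(rng.integers(50, 300)),
        max_depth=int(rng.integers(2, 12)),
        random_state=0))

best_score, best_model = -np.inf, None
for _ in range(20):                              # fixed search budget
    model = sample_candidate()
    score = cross_val_score(model, X, y, cv=3).mean()
    if score > best_score:
        best_score, best_model = score, model

print(f"best CV accuracy: {best_score:.3f}\n{best_model}")
```

Real AutoML systems replace the random sampler with smarter strategies (Bayesian optimization, evolution, bandits) and automate the remaining steps as well, but the search-evaluate-keep-the-best loop is the same.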
# Table of Contents
+ [Papers](#papers)
  - [Surveys](#surveys)
  - [Automated Feature Engineering](#automated-feature-engineering)
    - [Expand Reduce](#expand-reduce)
    - [Hierarchical Organization of Transformations](#hierarchical-organization-of-transformations)
    - [Meta Learning](#meta-learning)
    - [Reinforcement Learning](#reinforcement-learning)
  - [Architecture Search](#architecture-search)
    - [Evolutionary Algorithms](#evolutionary-algorithms)
    - [Local Search](#local-search)
    - [Meta Learning](#meta-learning-1)
    - [Reinforcement Learning](#reinforcement-learning-1)
    - [Transfer Learning](#transfer-learning)
    - [Network Morphism](#network-morphism)
    - [Continuous Optimization](#continuous-optimization)
  - [Hyperparameter Optimization](#hyperparameter-optimization)
    - [Bayesian Optimization](#bayesian-optimization)
    - [Evolutionary Algorithms](#evolutionary-algorithms-1)
    - [Lipschitz Functions](#lipschitz-functions)
    - [Local Search](#local-search-1)
    - [Meta Learning](#meta-learning-2)
    - [Particle Swarm Optimization](#particle-swarm-optimization)
    - [Random Search](#random-search)
    - [Transfer Learning](#transfer-learning-1)
  - [Performance Prediction](#performance-prediction)
  - [Frameworks](#frameworks)
  - [Miscellaneous](#miscellaneous)
+ [Tutorials](#tutorials)
  - [Bayesian Optimization](#bayesian-optimization)
  - [Meta Learning](#meta-learning-3)
+ [Blog](#blog)
  - [Bayesian Optimization](#bayesian-optimization)
  - [Meta Learning](#meta-learning)
+ [Slides](#slides)
  - [Bayesian Optimization](#slides)
+ [Books](#books)
  - [Meta Learning](#books)
+ [Projects](#projects)
+ [Prominent Researchers](#prominent-researchers)

# Papers
### Surveys
+ 2019 | AutoML: A Survey of the State-of-the-Art | Xin He, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1908.00709.pdf)
+ 2019 | Survey on Automated Machine Learning | Marc Zoeller, Marco F. Huber | arXiv | [`PDF`](https://arxiv.org/pdf/1904.12054.pdf)
+ 2019 | Automated Machine Learning: State-of-The-Art and Open Challenges | Radwa Elshawi, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1906.02287.pdf)
+ 2018 | Taking Human out of Learning Applications: A Survey on Automated Machine Learning | Quanming Yao, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1810.13306.pdf)
+ 2020 | On Hyperparameter Optimization of Machine Learning Algorithms: Theory and Practice | Li Yang, et al. | Neurocomputing | [`PDF`](https://arxiv.org/pdf/2007.15745.pdf)
+ 2020 | Automated Machine Learning -- a brief review at the end of the early years | Escalante, H. J. | arXiv | [`PDF`](https://arxiv.org/pdf/2008.08516.pdf)
+ 2022 | IoT Data Analytics in Dynamic Environments: From An Automated Machine Learning Perspective | Li Yang, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/2209.08018.pdf)
+ 2024 | Automated machine learning: past, present and future | Baratchi, M., et al. | Artificial Intelligence Review | [`Springer`](https://link.springer.com/article/10.1007/s10462-024-10726-1)
### Automated Feature Engineering
*A small expand-reduce sketch follows this section.*
+ #### Expand Reduce
  - 2022 | BERT-Sort: A Zero-shot MLM Semantic Encoder on Ordinal Features for AutoML | Mehdi Bahrami, et al. | AutoML | [`PDF`](https://github.com/marscod/BERT-Sort)
  - 2017 | AutoLearn - Automated Feature Generation and Selection | Ambika Kaul, et al. | ICDM | [`PDF`](https://ieeexplore.ieee.org/document/8215494/)
  - 2017 | One button machine for automating feature engineering in relational databases | Hoang Thanh Lam, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1706.00327.pdf)
  - 2016 | Automating Feature Engineering | Udayan Khurana, et al. | NIPS | [`PDF`](http://workshops.inf.ed.ac.uk/nips2016-ai4datasci/papers/NIPS2016-AI4DataSci_paper_13.pdf)
  - 2016 | ExploreKit: Automatic Feature Generation and Selection | Gilad Katz, et al. | ICDM | [`PDF`](http://ieeexplore.ieee.org/document/7837936/)
  - 2015 | Deep Feature Synthesis: Towards Automating Data Science Endeavors | James Max Kanter, Kalyan Veeramachaneni | DSAA | [`PDF`](http://www.jmaxkanter.com/static/papers/DSAA_DSM_2015.pdf)
+ #### Hierarchical Organization of Transformations
  - 2016 | Cognito: Automated Feature Engineering for Supervised Learning | Udayan Khurana, et al. | ICDMW | [`PDF`](http://ieeexplore.ieee.org/document/7836821/)
+ #### Meta Learning
  - 2020 | AutoML Pipeline Selection: Efficiently Navigating the Combinatorial Space | Chengrun Yang, et al. | KDD | [`PDF`](https://people.ece.cornell.edu/cy/_papers/tensor_oboe.pdf)
  - 2017 | Learning Feature Engineering for Classification | Fatemeh Nargesian, et al. | IJCAI | [`PDF`](https://www.ijcai.org/proceedings/2017/0352.pdf)
+ #### Reinforcement Learning
  - 2017 | Feature Engineering for Predictive Modeling using Reinforcement Learning | Udayan Khurana, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1709.07150.pdf)
  - 2010 | Feature Selection as a One-Player Game | Romaric Gaudel, Michele Sebag | ICML | [`PDF`](https://hal.archives-ouvertes.fr/inria-00484049/document)
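The expand-reduce pattern catalogued above can be illustrated in a few lines: expand the feature space with candidate transformations, then reduce it by keeping only the features that score well against the target. The sketch below is an illustrative assumption built on scikit-learn's `PolynomialFeatures` and `SelectKBest`, not code from any listed paper.

```python
# Illustrative expand-reduce feature engineering on a toy dataset.
from sklearn.datasets import load_diabetes
from sklearn.feature_selection import SelectKBest, f_regression
from sklearn.preprocessing import PolynomialFeatures

X, y = load_diabetes(return_X_y=True)

# Expand: generate squares and pairwise products of the raw features.
expanded = PolynomialFeatures(degree=2, include_bias=False).fit_transform(X)
print(f"expanded {X.shape[1]} raw features to {expanded.shape[1]}")

# Reduce: keep the 15 expanded features most predictive of the target.
reducer = SelectKBest(score_func=f_regression, k=15).fit(expanded, y)
X_reduced = reducer.transform(expanded)
print(f"kept {X_reduced.shape[1]} features")
```

The systems in the Expand Reduce list above follow the same expand-then-select shape, with far richer transformation pools and learned rankers in place of this univariate filter.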
### Architecture Search
*A toy evolutionary-search sketch follows this section.*
+ #### Evolutionary Algorithms
  - 2019 | Evolutionary Neural AutoML for Deep Learning | Jason Liang, et al. | GECCO | [`PDF`](https://dl.acm.org/doi/pdf/10.1145/3321707.3321721)
  - 2017 | Large-Scale Evolution of Image Classifiers | Esteban Real, et al. | PMLR | [`PDF`](https://arxiv.org/abs/1703.01041)
  - 2002 | Evolving Neural Networks through Augmenting Topologies | Kenneth O. Stanley, Risto Miikkulainen | Evolutionary Computation | [`PDF`](http://nn.cs.utexas.edu/downloads/papers/stanley.ec02.pdf)
+ #### Local Search
  - 2017 | Simple and Efficient Architecture Search for Convolutional Neural Networks | Thomas Elsken, et al. | ICLR | [`PDF`](https://arxiv.org/pdf/1711.04528.pdf)
+ #### Meta Learning
  - 2016 | Learning to Optimize | Ke Li, Jitendra Malik | arXiv | [`PDF`](https://arxiv.org/pdf/1606.01885.pdf)
+ #### Reinforcement Learning
  - 2018 | AMC: AutoML for Model Compression and Acceleration on Mobile Devices | Yihui He, et al. | ECCV | [`PDF`](http://openaccess.thecvf.com/content_ECCV_2018/papers/Yihui_He_AMC_Automated_Model_ECCV_2018_paper.pdf)
  - 2018 | Efficient Neural Architecture Search via Parameter Sharing | Hieu Pham, et al. | arXiv | [`PDF`](https://arxiv.org/abs/1802.03268)
  - 2017 | Neural Architecture Search with Reinforcement Learning | Barret Zoph, Quoc V. Le | ICLR | [`PDF`](https://arxiv.org/pdf/1611.01578.pdf)
+ #### Transfer Learning
  - 2017 | Learning Transferable Architectures for Scalable Image Recognition | Barret Zoph, et al. | arXiv | [`PDF`](https://arxiv.org/abs/1707.07012)
+ #### Network Morphism
  - 2019 | Auto-Keras: An Efficient Neural Architecture Search System | Haifeng Jin, et al. | KDD | [`PDF`](https://dl.acm.org/doi/pdf/10.1145/3292500.3330648)
+ #### Continuous Optimization
  - 2018 | Neural Architecture Optimization | Renqian Luo, et al. | arXiv | [`PDF`](https://arxiv.org/abs/1808.07233)
  - 2019 | DARTS: Differentiable Architecture Search | Hanxiao Liu, et al. | ICLR | [`PDF`](https://arxiv.org/abs/1806.09055)
  - 2021 | SEDONA: Search for Decoupled Neural Networks toward Greedy Block-wise Learning | Pyeon, et al. | ICLR | [`PDF`](https://openreview.net/pdf?id=XLfdzwNKzch)
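The evolutionary branch above boils down to a loop of mutate, select, retire. The toy sketch below encodes an architecture as a (depth, width) tuple and uses a synthetic `fitness` function as a stand-in for the expensive train-and-validate step, so it is an illustrative assumption rather than any listed paper's actual method.

```python
# Toy evolutionary architecture search over (depth, width) tuples.
import random
random.seed(0)

def fitness(arch):
    """Synthetic stand-in for validation accuracy after training `arch`."""
    depth, width = arch
    return -(depth - 6) ** 2 - ((width - 128) / 32) ** 2

def mutate(arch):
    """Perturb one architectural dimension at random."""
    depth, width = arch
    if random.random() < 0.5:
        depth = max(1, depth + random.choice([-1, 1]))
    else:
        width = max(8, width + random.choice([-16, 16]))
    return (depth, width)

# Aging-evolution style loop: tournament-select a parent, add its
# mutant to the population, and retire the oldest individual.
population = [(random.randint(1, 12), random.randrange(8, 257, 8))
              for _ in range(10)]
for _ in range(200):
    parent = max(random.sample(population, 3), key=fitness)
    population.append(mutate(parent))
    population.pop(0)

print(max(population, key=fitness))      # best architecture found
```

Real NAS systems differ mainly in the encoding (whole graphs or cells instead of two integers) and in evaluating fitness by actually training each candidate.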
### Frameworks
+ 2019 | Auptimizer -- an Extensible, Open-Source Framework for Hyperparameter Tuning | Jiayi Liu, et al. | IEEE Big Data | [`PDF`](https://arxiv.org/pdf/1911.02522)
+ 2019 | Towards modular and programmable architecture search | Renato Negrinho, et al. | NeurIPS | [`PDF`](https://arxiv.org/pdf/1909.13404.pdf)
+ 2019 | Evolutionary Neural AutoML for Deep Learning | Jason Liang, et al. | arXiv | [`PDF`](https://arxiv.org/abs/1902.06827)
+ 2017 | ATM: A Distributed, Collaborative, Scalable System for Automated Machine Learning | T. Swearingen, et al. | IEEE | [`PDF`](https://cyphe.rs/static/atm.pdf)
+ 2017 | Google Vizier: A Service for Black-Box Optimization | Daniel Golovin, et al. | KDD | [`PDF`](https://static.googleusercontent.com/media/research.google.com/zh-CN//pubs/archive/46180.pdf)
+ 2015 | AutoCompete: A Framework for Machine Learning Competitions | Abhishek Thakur, et al. | ICML | [`PDF`](https://arxiv.org/pdf/1507.02188.pdf)

### Hyperparameter Optimization
*A minimal Bayesian-optimization sketch follows this section's lists.*
+ #### Bayesian Optimization
  - 2020 | Bayesian Optimization of Risk Measures | NeurIPS | [`PDF`](https://proceedings.neurips.cc/paper/2020/file/e8f2779682fd11fa2067beffc27a9192-Paper.pdf)
  - 2020 | BOTORCH: A Framework for Efficient Monte-Carlo Bayesian Optimization | NeurIPS | [`PDF`](https://proceedings.neurips.cc/paper/2020/file/f5b1b89d98b7286673128a5fb112cb9a-Paper.pdf)
  - 2020 | Tuning Hyperparameters without Grad Students: Scalable and Robust Bayesian Optimisation with Dragonfly | JMLR | [`PDF`](https://arxiv.org/pdf/1903.06694.pdf)
  - 2019 | Bayesian Optimization with Unknown Search Space | NeurIPS | [`PDF`](http://papers.nips.cc/paper/9350-bayesian-optimization-with-unknown-search-space.pdf)
  - 2019 | Constrained Bayesian optimization with noisy experiments | [`PDF`](https://projecteuclid.org/download/pdfview_1/euclid.ba/1533866666)
  - 2019 | Learning search spaces for Bayesian optimization: Another view of hyperparameter transfer learning | NeurIPS | [`PDF`](http://papers.nips.cc/paper/9438-learning-search-spaces-for-bayesian-optimization-another-view-of-hyperparameter-transfer-learning.pdf)
  - 2019 | Practical Two-Step Lookahead Bayesian Optimization | NeurIPS | [`PDF`](http://papers.nips.cc/paper/9174-practical-two-step-lookahead-bayesian-optimization.pdf)
  - 2019 | Predictive entropy search for multi-objective Bayesian optimization with constraints | [`PDF`](https://arxiv.org/pdf/1609.01051.pdf)
  - 2018 | BOCK: Bayesian optimization with cylindrical kernels | ICML | [`PDF`](https://arxiv.org/pdf/1806.01619.pdf)
  - 2018 | Efficient High Dimensional Bayesian Optimization with Additivity and Quadrature Fourier Features | Mojmír Mutný, et al. | NeurIPS | [`PDF`](https://papers.nips.cc/paper/8115-efficient-high-dimensional-bayesian-optimization-with-additivity-and-quadrature-fourier-features.pdf)
  - 2018 | High-Dimensional Bayesian Optimization via Additive Models with Overlapping Groups | PMLR | [`PDF`](https://arxiv.org/pdf/1802.07028v2.pdf)
  - 2018 | Maximizing acquisition functions for Bayesian optimization | NeurIPS | [`PDF`](http://papers.nips.cc/paper/8194-maximizing-acquisition-functions-for-bayesian-optimization.pdf)
  - 2018 | Scalable hyperparameter transfer learning | NeurIPS | [`PDF`](http://papers.nips.cc/paper/7917-scalable-hyperparameter-transfer-learning.pdf)
  - 2016 | Bayesian Optimization with Robust Bayesian Neural Networks | Jost Tobias Springenberg, et al. | NIPS | [`PDF`](https://papers.nips.cc/paper/6117-bayesian-optimization-with-robust-bayesian-neural-networks.pdf)
  - 2016 | Scalable Hyperparameter Optimization with Products of Gaussian Process Experts | Nicolas Schilling, et al. | PKDD | [`PDF`](https://link.springer.com/chapter/10.1007/978-3-319-46128-1_3)
  - 2016 | Taking the Human Out of the Loop: A Review of Bayesian Optimization | Bobak Shahriari, et al. | IEEE | [`PDF`](http://ieeexplore.ieee.org/document/7352306/)
  - 2016 | Towards Automatically-Tuned Neural Networks | Hector Mendoza, et al. | JMLR | [`PDF`](http://aad.informatik.uni-freiburg.de/papers/16-AUTOML-AutoNet.pdf)
  - 2016 | Two-Stage Transfer Surrogate Model for Automatic Hyperparameter Optimization | Martin Wistuba, et al. | PKDD | [`PDF`](https://link.springer.com/chapter/10.1007/978-3-319-46128-1_13)
  - 2015 | Efficient and Robust Automated Machine Learning | [`PDF`](https://papers.nips.cc/paper/5872-efficient-and-robust-automated-machine-learning.pdf)
  - 2015 | Hyperparameter Optimization with Factorized Multilayer Perceptrons | Nicolas Schilling, et al. | PKDD | [`PDF`](https://link.springer.com/chapter/10.1007/978-3-319-23525-7_6)
  - 2015 | Hyperparameter Search Space Pruning - A New Component for Sequential Model-Based Hyperparameter Optimization | Martin Wistuba, et al. | [`PDF`](https://dl.acm.org/citation.cfm?id=2991491)
  - 2015 | Joint Model Choice and Hyperparameter Optimization with Factorized Multilayer Perceptrons | Nicolas Schilling, et al. | ICTAI | [`PDF`](http://ieeexplore.ieee.org/abstract/document/7372120/)
  - 2015 | Learning Hyperparameter Optimization Initializations | Martin Wistuba, et al. | DSAA | [`PDF`](http://ieeexplore.ieee.org/abstract/document/7344817/)
  - 2015 | Scalable Bayesian optimization using deep neural networks | Jasper Snoek, et al. | ACM | [`PDF`](https://dl.acm.org/citation.cfm?id=3045349)
  - 2015 | Sequential Model-free Hyperparameter Tuning | Martin Wistuba, et al. | ICDM | [`PDF`](http://ieeexplore.ieee.org/abstract/document/7373431/)
  - 2013 | Auto-WEKA: Combined Selection and Hyperparameter Optimization of Classification Algorithms | [`PDF`](http://www.cs.ubc.ca/labs/beta/Projects/autoweka/papers/autoweka.pdf)
  - 2013 | Making a Science of Model Search: Hyperparameter Optimization in Hundreds of Dimensions for Vision Architectures | J. Bergstra | JMLR | [`PDF`](http://proceedings.mlr.press/v28/bergstra13.pdf)
  - 2012 | Practical Bayesian Optimization of Machine Learning Algorithms | [`PDF`](https://papers.nips.cc/paper/4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf)
  - 2011 | Sequential Model-Based Optimization for General Algorithm Configuration (extended version) | [`PDF`](https://www.cs.ubc.ca/~hutter/papers/10-TR-SMAC.pdf)
+ #### Evolutionary Algorithms
  - 2020 | Delta-STN: Efficient Bilevel Optimization for Neural Networks using Structured Response Jacobians | Juhan Bae, Roger Grosse | NeurIPS | [`PDF`](https://arxiv.org/abs/2010.13514)
  - 2018 | Autostacker: A Compositional Evolutionary Learning System | Boyuan Chen, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1803.00684.pdf)
  - 2017 | Large-Scale Evolution of Image Classifiers | Esteban Real, et al. | PMLR | [`PDF`](https://arxiv.org/pdf/1703.01041.pdf)
  - 2016 | Automating biomedical data science through tree-based pipeline optimization | Randal S. Olson, et al. | ECAL | [`PDF`](https://arxiv.org/pdf/1601.07925.pdf)
  - 2016 | Evaluation of a tree-based pipeline optimization tool for automating data science | Randal S. Olson, et al. | GECCO | [`PDF`](https://dl.acm.org/citation.cfm?id=2908918)
+ #### Lipschitz Functions
  - 2017 | Global Optimization of Lipschitz functions | Cédric Malherbe, Nicolas Vayatis | arXiv | [`PDF`](https://arxiv.org/pdf/1703.02628.pdf)
+ #### Local Search
  - 2009 | ParamILS: An Automatic Algorithm Configuration Framework | Frank Hutter, et al. | JAIR | [`PDF`](https://arxiv.org/pdf/1401.3492.pdf)
+ #### Meta Learning
  - 2019 | OBOE: Collaborative Filtering for AutoML Model Selection | Chengrun Yang, et al. | KDD | [`PDF`](https://dl.acm.org/doi/pdf/10.1145/3292500.3330909)
  - 2019 | SMARTML: A Meta Learning-Based Framework for Automated Selection and Hyperparameter Tuning for Machine Learning Algorithms | [`PDF`](http://openproceedings.org/2019/conf/edbt/EDBT19_paper_235.pdf)
  - 2008 | Cross-Disciplinary Perspectives on Meta-Learning for Algorithm Selection | [`PDF`](https://dl.acm.org/citation.cfm?id=1456656)
+ #### Particle Swarm Optimization
  - 2017 | Particle Swarm Optimization for Hyper-parameter Selection in Deep Neural Networks | Pablo Ribalta Lorenzo, et al. | GECCO | [`PDF`](https://dl.acm.org/citation.cfm?id=3071208)
  - 2008 | Particle Swarm Optimization for Parameter Determination and Feature Selection of Support Vector Machines | Shih-Wei Lin, et al. | Expert Systems with Applications | [`PDF`](http://www.sciencedirect.com/science/article/pii/S0957417407003752)
+ #### Random Search
  - 2016 | Hyperband: A Novel Bandit-Based Approach to Hyperparameter Optimization | Lisha Li, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1603.06560.pdf)
  - 2012 | Random Search for Hyper-Parameter Optimization | James Bergstra, Yoshua Bengio | JMLR | [`PDF`](http://www.jmlr.org/papers/volume13/bergstra12a/bergstra12a.pdf)
  - 2011 | Algorithms for Hyper-parameter Optimization | James Bergstra, et al. | NIPS | [`PDF`](https://dl.acm.org/citation.cfm?id=2986743)
+ #### Transfer Learning
  - 2016 | Efficient Transfer Learning Method for Automatic Hyperparameter Tuning | Dani Yogatama, Gideon Mann | JMLR | [`PDF`](https://pdfs.semanticscholar.org/75f2/6734972ebaffc6b43d45abd3048ef75f15a5.pdf)
  - 2016 | Flexible Transfer Learning Framework for Bayesian Optimisation | Tinu Theckel Joy, et al. | PAKDD | [`PDF`](https://link.springer.com/chapter/10.1007/978-3-319-31753-3_9)
  - 2016 | Hyperparameter Optimization Machines | Martin Wistuba, et al. | DSAA | [`PDF`](http://ieeexplore.ieee.org/abstract/document/7796889/)
  - 2013 | Collaborative Hyperparameter Tuning | Rémi Bardenet, et al. | ICML | [`PDF`](http://proceedings.mlr.press/v28/bardenet13.pdf)
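Bayesian optimization, the largest family in the section above, fits a cheap probabilistic surrogate to the configurations evaluated so far and picks the next one by maximizing an acquisition function. Below is a minimal sketch, assuming a Gaussian-process surrogate, the expected-improvement acquisition, and a synthetic 1-D objective standing in for validation loss; it is illustrative only, not any listed paper's method.

```python
# Minimal Bayesian optimization: GP surrogate + expected improvement (EI).
import numpy as np
from scipy.stats import norm
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern

def objective(x):                        # synthetic "validation loss"
    return np.sin(3 * x) + 0.1 * x ** 2

candidates = np.linspace(-4, 4, 400).reshape(-1, 1)   # search-space grid
X = np.array([[-3.0], [0.0], [3.0]])                  # initial design
y = objective(X).ravel()

for _ in range(15):
    # Fit the surrogate to all observations made so far.
    gp = GaussianProcessRegressor(kernel=Matern(nu=2.5),
                                  alpha=1e-6, normalize_y=True).fit(X, y)
    mu, sigma = gp.predict(candidates, return_std=True)
    best = y.min()
    # Expected improvement (minimization form).
    z = (best - mu) / np.maximum(sigma, 1e-9)
    ei = (best - mu) * norm.cdf(z) + sigma * norm.pdf(z)
    x_next = candidates[np.argmax(ei)]                # most promising point
    X = np.vstack([X, [x_next]])
    y = np.append(y, objective(x_next[0]))

print(f"minimum near x = {X[np.argmin(y)][0]:.3f}, loss = {y.min():.3f}")
```

In real HPO the objective is a full training run and x is a vector of hyperparameters; libraries such as SMAC3, Scikit-Optimize, and RoBO (all in the Projects table below) package exactly this loop.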
### Miscellaneous
- 2020 | Automated Machine Learning Techniques for Data Streams | Alexandru-Ionut Imbrea | [`PDF`](https://github.com/AlexImb/automl-streams-research-paper/raw/master/A_Imbrea_AutoML_Data_Streams.pdf)
- 2018 | Accelerating Neural Architecture Search using Performance Prediction | Bowen Baker, et al. | ICLR | [`PDF`](https://openreview.net/pdf?id=BJypUGZ0Z)
- 2017 | Automatic Frankensteining: Creating Complex Ensembles Autonomously | Martin Wistuba, et al. | SIAM | [`PDF`](http://epubs.siam.org/doi/pdf/10.1137/1.9781611974973.83)
- 2018 | Characterizing classification datasets: A study of meta-features for meta-learning | Rivolli, Adriano, et al. | arXiv | [`PDF`](https://arxiv.org/pdf/1808.10406.pdf)
- 2020 | Putting the Human Back in the AutoML Loop | Xanthopoulos, Iordanis, et al. | EDBT/ICDT | [`PDF`](http://ceur-ws.org/Vol-2578/ETMLP5.pdf)

# Tutorials
### Bayesian Optimization
+ 2018 | A Tutorial on Bayesian Optimization | [`PDF`](https://arxiv.org/pdf/1807.02811.pdf)
+ 2010 | A Tutorial on Bayesian Optimization of Expensive Cost Functions, with Application to Active User Modeling and Hierarchical Reinforcement Learning | [`PDF`](https://arxiv.org/pdf/1012.2599v1.pdf)
### Meta Learning
+ 2008 | Metalearning - A Tutorial | [`PDF`](https://pdfs.semanticscholar.org/5794/1a4891f673cadf06fba02419372aad85c3bb.pdf)


# Blog
| Type       | Blog Title | Link       |
| :--------: | :--------: | :--------: |
| HPO        | Bayesian Optimization for Hyperparameter Tuning | [`Link`](https://arimo.com/data-science/2016/bayesian-optimization-hyperparameter-tuning/) |
| Meta-Learning | Learning to learn | [`Link`](http://bair.berkeley.edu/blog/2017/07/18/learning-to-learn/) |
| Meta-Learning | Why Meta-learning is Crucial for Further Advances of Artificial Intelligence? | [`Link`](https://chatbotslife.com/why-meta-learning-is-crucial-for-further-advances-of-artificial-intelligence-c2df55959adf) |
# Books
| Year of Publication | Type | Book Title | Authors | Publisher | Link |
| :--------: | :--------: | :--------: | :--------: | :--------: | :--------: |
| 2009 | Meta-Learning | Metalearning - Applications to Data Mining | Brazdil, P., Giraud Carrier, C., Soares, C., Vilalta, R. | Springer | [`Download`](http://www.springer.com/la/book/9783540732624) |
| 2019 | HPO, Meta-Learning, NAS | AutoML: Methods, Systems, Challenges | Frank Hutter, Lars Kotthoff, Joaquin Vanschoren | Springer | [`Download`](https://www.automl.org/book/) |
| 2021 | AutoML | Automated Machine Learning in Action | Qingquan Song, Haifeng Jin, Xia Hu | Manning Publications | [`Download`](https://www.manning.com/books/automated-machine-learning-in-action) |


# Videos
| Title | Author | Link |
| :--------: | :--------: | :--------: |
| AutoML Basics: Automated Machine Learning in Action | Qingquan Song, Haifeng Jin, Xia Hu | [`Link`](https://www.youtube.com/watch?v=9KpieG0B7VM) |


# Projects
| Project    | Type       | Language   | License    | Link       |
| :--------: | :--------: | :--------: | :--------: | :--------: |
| AdaNet     | NAS        | Python     | Apache-2.0 | [`Github`](https://github.com/tensorflow/adanet) |
| Advisor    | HPO        | Python     | Apache-2.0 | [`Github`](https://github.com/tobegit3hub/advisor) |
| AMLA       | HPO, NAS   | Python     | Apache-2.0 | [`Github`](https://github.com/CiscoAI/amla) |
| ATM        | HPO        | Python     | MIT        | [`Github`](https://github.com/HDI-Project/ATM) |
| Auger      | HPO        | Python     | Commercial | [`Homepage`](https://auger.ai) |
| auptimizer | HPO, NAS   | Python (supports R scripts) | GPL-3.0 | [`Github`](https://github.com/LGE-ARC-AdvancedAI/auptimizer) |
| Auto-Keras | NAS        | Python     | [`License`](https://github.com/keras-team/autokeras/blob/master/LICENSE.txt) | [`Github`](https://github.com/keras-team/autokeras) |
| AutoML Vision | NAS     | Python     | Commercial | [`Homepage`](https://cloud.google.com/vision/) |
| AutoML Video Intelligence | NAS | Python | Commercial | [`Homepage`](https://cloud.google.com/video-intelligence/) |
| AutoML Natural Language | NAS | Python   | Commercial | [`Homepage`](https://cloud.google.com/natural-language/) |
| AutoML Translation | NAS | Python        | Commercial | [`Homepage`](https://cloud.google.com/translate/) |
| AutoML Tables | AutoFE, HPO | Python     | Commercial | [`Homepage`](https://cloud.google.com/automl-tables/) |
| AutoPyTorch | HPO, NAS  | Python     | Apache-2.0 | [`Github`](https://github.com/automl/Auto-PyTorch) |
| HyperGBM   | HPO        | Python     | Apache-2.0 | [`Github`](https://github.com/DataCanvasIO/HyperGBM/) |
| HyperKeras | NAS        | Python     | Apache-2.0 | [`Github`](https://github.com/DataCanvasIO/HyperKeras/) |
| Hypernets  | HPO, NAS   | Python     | Apache-2.0 | [`Github`](https://github.com/DataCanvasIO/Hypernets/) |
| auto-sklearn | HPO      | Python     | [`License`](https://github.com/automl/auto-sklearn/blob/master/LICENSE.txt) | [`Github`](https://github.com/automl/auto-sklearn) |
| auto_ml    | HPO        | Python     | MIT        | [`Github`](https://github.com/ClimbsRocks/auto_ml) |
| BayesianOptimization | HPO | Python   | MIT        | [`Github`](https://github.com/fmfn/BayesianOptimization) |
| BayesOpt   | HPO        | C++        | AGPL-3.0   | [`Github`](https://github.com/rmcantin/bayesopt) |
| comet      | HPO        | Python     | Commercial | [`Homepage`](https://www.comet.ml) |
| DataRobot  | HPO        | Python     | Commercial | [`Homepage`](https://www.datarobot.com/) |
| DEvol      | NAS        | Python     | MIT        | [`Github`](https://github.com/joeddav/devol) |
| DeepArchitect | NAS     | Python     | MIT        | [`Github`](https://github.com/negrinho/deep_architect) |
| Determined | HPO, NAS   | Python     | Apache-2.0 | [`Github`](https://github.com/determined-ai/determined) |
| Driverless AI | AutoFE  | Python     | Commercial | [`Homepage`](https://www.h2o.ai/products/h2o-driverless-ai/) |
| FAR-HO     | HPO        | Python     | MIT        | [`Github`](https://github.com/lucfra/FAR-HO) |
| H2O AutoML | HPO        | Python, R, Java, Scala | Apache-2.0 | [`Github`](https://github.com/h2oai/h2o-3/) |
| HpBandSter | HPO        | Python     | BSD-3-Clause | [`Github`](https://github.com/automl/HpBandSter) |
| HyperBand  | HPO        | Python     | [`License`](https://github.com/zygmuntz/hyperband/blob/master/LICENSE) | [`Github`](https://github.com/zygmuntz/hyperband) |
| Hyperopt   | HPO        | Python     | [`License`](https://github.com/hyperopt/hyperopt/blob/master/LICENSE.txt) | [`Github`](https://github.com/hyperopt/hyperopt) |
| Hyperopt-sklearn | HPO  | Python     | [`License`](https://github.com/hyperopt/hyperopt-sklearn/blob/master/LICENSE.txt) | [`Github`](https://github.com/hyperopt/hyperopt-sklearn) |
| Hyperparameter Hunter | HPO | Python  | MIT        | [`Github`](https://github.com/HunterMcGushion/hyperparameter_hunter) |
| Katib      | HPO        | Python     | Apache-2.0 | [`Github`](https://github.com/kubeflow/katib) |
| MateLabs   | HPO        | Python     | Commercial | [`Homepage`](http://matelabs.in/) |
| Milano     | HPO        | Python     | Apache-2.0 | [`Github`](https://github.com/NVIDIA/Milano) |
| MLJAR      | AutoFE, HPO, NAS | Python | MIT       | [`Github`](https://github.com/mljar/mljar-supervised) |
| mlr3automl | HPO        | R          | LGPL-3.0   | [`GitHub`](https://github.com/a-hanf/mlr3automl) |
| nasbot     | NAS        | Python     | MIT        | [`Github`](https://github.com/kirthevasank/nasbot) |
| neptune    | HPO        | Python     | Commercial | [`Homepage`](https://neptune.ml/) |
| NNI        | HPO, NAS   | Python     | MIT        | [`Github`](https://github.com/Microsoft/nni) |
| Oboe       | HPO        | Python     | BSD-3-Clause | [`Github`](https://github.com/udellgroup/oboe) |
| Optunity   | HPO        | Python     | [`License`](https://github.com/claesenm/optunity/blob/master/LICENSE.txt) | [`Github`](https://github.com/claesenm/optunity) |
| R2.ai      | HPO        |            | Commercial | [`Homepage`](https://r2.ai/) |
| RBFOpt     | HPO        | Python     | [`License`](https://github.com/coin-or/rbfopt/blob/master/LICENSE) | [`Github`](https://github.com/coin-or/rbfopt) |
| RoBO       | HPO        | Python     | BSD-3-Clause | [`Github`](https://github.com/automl/RoBO) |
| Scikit-Optimize | HPO   | Python     | [`License`](https://github.com/scikit-optimize/scikit-optimize/blob/master/LICENSE.md) | [`Github`](https://github.com/scikit-optimize/scikit-optimize) |
| SigOpt     | HPO        | Python     | Commercial | [`Homepage`](https://sigopt.com/) |
| SMAC3      | HPO        | Python     | [`License`](https://github.com/automl/SMAC3/blob/master/LICENSE) | [`Github`](https://github.com/automl/SMAC3) |
| TPOT       | AutoFE, HPO | Python    | LGPL-3.0   | [`Github`](https://github.com/rhiever/tpot) |
| TransmogrifAI | HPO     | Scala      | BSD-3-Clause | [`Github`](https://github.com/salesforce/TransmogrifAI) |
| Tune       | HPO        | Python     | Apache-2.0 | [`Github`](https://github.com/ray-project/ray/tree/master/python/ray/tune) |
| Xcessiv    | HPO        | Python     | Apache-2.0 | [`Github`](https://github.com/reiinakano/xcessiv) |
| SmartML    | HPO        | R          | GPL-3.0    | [`Github`](https://github.com/DataSystemsGroupUT/SmartML) |
| MLBox      | AutoFE, HPO | Python    | BSD-3-Clause | [`Github`](https://github.com/AxeldeRomblay/MLBox) |
| AutoAI Watson | AutoFE, HPO |        | Commercial | [`Homepage`](https://www.ibm.com/cloud/watson-studio/autoai) |
| AutoML     | AutoML     | Python     | MIT        | [`Github`](https://github.com/Western-OC2-Lab/AutoML-Implementation-for-Static-and-Dynamic-Data-Analytics) |
| Optuna     | HPO        | Python     | MIT        | [`Github`](https://github.com/optuna/optuna) |


# Slides
| Type    | Slide Title | Authors    | Link       |
| :--------: | :--------: | :--------: | :--------: |
| AutoFE     | Automated Feature Engineering for Predictive Modeling | Udayan Khurana, et al. | [`Download`](https://github.com/hibayesian/awesome-automl-papers/blob/master/resources/slides/%5Bslides%5D-automated-feature-engineering-for-predictive-modeling.pdf) |
| HPO     | A Tutorial on Bayesian Optimization for Machine Learning | Ryan P. Adams | [`Download`](https://github.com/hibayesian/awesome-automl-papers/blob/master/resources/slides/%5Bslides%5D-a-tutorial-on-bayesian-optimization-for-machine-learning.pdf) |
| HPO     | Bayesian Optimisation | Gilles Louppe | [`Download`](https://github.com/hibayesian/awesome-automl-papers/blob/master/resources/slides/%5Bslides%5D-bayesian-optimisation.pdf) |


# Acknowledgement
Special thanks to everyone who contributed to this project.

| Name       | Bio        |
| :--------: | :--------: |
| [Alexander Robles](https://github.com/Alro10) | PhD Student @UNICAMP-Brazil |
| [derekflint](https://github.com/derekflint) | |
| [endymecy](https://github.com/endymecy) | Senior Researcher @Tencent |
| [Eric](https://github.com/ffengENG) | |
| [Erin LeDell](https://github.com/ledell) | Chief Machine Learning Scientist @H2O.ai |
| [fwcore](https://github.com/fwcore) | |
| [Gaurav Mittal](https://github.com/g1910) | |
| [Hernan Ceferino Vazquez](https://github.com/hcvazquez) | PhD, Data Science Expert @MercadoLibre |
| [Kaustubh Damania](https://github.com/KaustubhDamania) | |
| [Lilian Besson](https://github.com/naereen) | PhD Student @CentraleSupélec |
| [罗磊](https://github.com/luoleicn) | |
| [Marc](https://github.com/Ennosigaeon) | |
| [Mohamed Maher](https://github.com/mmaher22) | |
| [Neil Conway](https://github.com/neilconway) | CTO @Determined AI |
| [Richard Liaw](https://github.com/richardliaw) | PhD Student @UC Berkeley |
| [Randy Olson](https://github.com/rhiever) | Lead Data Scientist @LifeEGX |
| [Slava Kurilyak](https://github.com/slavakurilyak) | Founder, CEO @Produvia |
| [Saket Maheshwary](https://github.com/saket-maheshwary) | AI Researcher |
| [shaido987](https://github.com/shaido987) | |
| [sophia-wright-blue](https://github.com/sophia-wright-blue) | |
| [tengben0905](https://github.com/tengben0905) | |
| [xuehui](https://github.com/xuehui1991) | @Microsoft |
| [Yihui He](https://github.com/yihui-he) | Grad Student @CMU |


# Contact & Feedback
If you have any suggestions (missing papers, new papers, key researchers or typos), feel free to open a pull request.
You can also reach the maintainer by email:
+ Mark Lin (hibayesian@gmail.com).


# Licenses
Awesome-AutoML-Papers is available under the Apache License 2.0.
[元学习](#meta-learning-3)\n+ [文章](#articles)\n  - [贝叶斯优化](#bayesian-optimization)\n  - [元学习](#meta-learning)\n+ [幻灯片](#slides)\n  - [贝叶斯优化](#slides)\n+ [书籍](#books)\n  - [元学习](#books)\n+ [项目](#projects)\n+ [杰出研究人员](#prominent-researchers)\n\n# 论文\n### 综述\n+ 2019年 | AutoML：现状综述 | Xin He等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1908.00709.pdf)\n+ 2019年 | 自动化机器学习综述 | Marc Zoeller, Marco F. Huber | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1904.12054.pdf)\n+ 2019年 | 自动化机器学习：现状与开放挑战 | Radwa Elshawi等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1906.02287.pdf)\n+ 2018年 | 将人类从学习应用中解放出来：自动化机器学习综述 | Quanming Yao等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1810.13306.pdf)\n+ 2020年 | 关于机器学习算法超参数优化的理论与实践 | Li Yang等 | Neurocomputing | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F2007.15745.pdf)\n+ 2020年 | 自动化机器学习——早期阶段结束时的简要回顾 | Escalante, H. J. | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F2008.08516.pdf)\n+ 2022年 | 动态环境中的物联网数据分析：从自动化机器学习的角度 | Li Yang等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F2209.08018.pdf)\n+ 2024年 | 自动化机器学习：过去、现在和未来 | Baratchi. M等 | 人工智能评论 | [`Springer`](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs10462-024-10726-1)\n\n### 自动化特征工程\n+ #### 扩展与约简\n - 2022 | BERT-Sort：用于 AutoML 的序数特征零样本 MLM 语义编码器 | Mehdi Bahrami 等 | AutoML | [`PDF`](https:\u002F\u002Fgithub.com\u002Fmarscod\u002FBERT-Sort)\n  - 2017 | AutoLearn — 自动化特征生成与选择 | Ambika Kaul 等 | ICDM | [`PDF`](https:\u002F\u002Fieeexplore.ieee.org\u002Fdocument\u002F8215494\u002F)\n  - 2017 | 一键式工具，用于自动化关系数据库中的特征工程 | Hoang Thanh Lam 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1706.00327.pdf)\n  - 2016 | 自动化特征工程 | Udayan Khurana 等 | NIPS | [`PDF`](http:\u002F\u002Fworkshops.inf.ed.ac.uk\u002Fnips2016-ai4datasci\u002Fpapers\u002FNIPS2016-AI4DataSci_paper_13.pdf)\n  - 2016 | ExploreKit：自动特征生成与选择 | Gilad Katz 等 | ICDM | [`PDF`](http:\u002F\u002Fieeexplore.ieee.org\u002Fdocument\u002F7837936\u002F)\n  - 2015 | 深度特征合成：迈向数据科学工作的自动化 | James Max Kanter, Kalyan Veeramachaneni | DSAA | [`PDF`](http:\u002F\u002Fwww.jmaxkanter.com\u002Fstatic\u002Fpapers\u002FDSAA_DSM_2015.pdf)\n+ #### 转换的层次化组织\n  - 2016 | Cognito：面向监督学习的自动化特征工程 | Udayan Khurana 等 | ICDMW | [`PDF`](http:\u002F\u002Fieeexplore.ieee.org\u002Fdocument\u002F7836821\u002F)\n+ #### 元学习\n  - 2020 | AutoML 流水线选择：高效探索组合空间 | Chengrun Yang 等 | KDD | [`PDF`](https:\u002F\u002Fpeople.ece.cornell.edu\u002Fcy\u002F_papers\u002Ftensor_oboe.pdf)\n  - 2017 | 学习分类任务的特征工程 | Fatemeh Nargesian 等 | IJCAI | [`PDF`](https:\u002F\u002Fwww.ijcai.org\u002Fproceedings\u002F2017\u002F0352.pdf)\n+ #### 强化学习\n  - 2017 | 基于强化学习的预测建模特征工程 | Udayan Khurana 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1709.07150.pdf)\n  - 2010 | 特征选择作为单人游戏 | Romaric Gaudel, Michele Sebag | ICML | [`PDF`](https:\u002F\u002Fhal.archives-ouvertes.fr\u002Finria-00484049\u002Fdocument)\n### 架构搜索\n+ #### 进化算法\n  - 2019 | 面向深度学习的进化神经 AutoML | Jason Liang 等 | GECCO | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002Fpdf\u002F10.1145\u002F3321707.3321721)\n  - 2017 | 大规模图像分类器进化 | Esteban Real 等 | PMLR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F1703.01041)\n  - 2002 | 通过拓扑增益进化神经网络 | Kenneth O.Stanley, Risto Miikkulainen | Evolutionary Computation | [`PDF`](http:\u002F\u002Fnn.cs.utexas.edu\u002Fdownloads\u002Fpapers\u002Fstanley.ec02.pdf)\n+ #### 局部搜索\n  - 2017 | 卷积神经网络的简单高效架构搜索 | Thomoas Elsken 等 | ICLR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1711.04528.pdf)\n+ 
#### 元学习\n  - 2016 | 学习优化 | Ke Li, Jitendra Malik | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1606.01885.pdf)\n+ #### 强化学习\n  - 2018 | AMC：面向移动设备的模型压缩与加速 AutoML | Yihui He 等 | ECCV | [`PDF`](http:\u002F\u002Fopenaccess.thecvf.com\u002Fcontent_ECCV_2018\u002Fpapers\u002FYihui_He_AMC_Automated_Model_ECCV_2018_paper.pdf)\n  - 2018 | 通过参数共享实现高效的神经架构搜索 | Hieu Pham 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F1802.03268)\n  - 2017 | 基于强化学习的神经架构搜索 | Barret Zoph, Quoc V. Le | ICLR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1611.01578.pdf)\n+ #### 迁移学习\n  - 2017 | 学习可迁移的架构以实现可扩展的图像识别 | Barret Zoph 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F1707.07012)\n+ #### 网络形态学\n  - 2019 | Auto-Keras：高效的神经架构搜索系统 | Haifeng Jin 等 | KDD | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002Fpdf\u002F10.1145\u002F3292500.3330648)\n+ #### 连续优化\n  - 2018 | 神经架构优化 | Renqian Luo 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F1808.07233)\n  - 2019 | DARTS：可微分架构搜索 | Hanxiao Liu 等 | ICLR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F1806.09055)\n  - 2021 | SEDONA：面向贪婪分块学习的解耦神经网络搜索 | Pyeon 等 | ICLR | [`PDF`](https:\u002F\u002Fopenreview.net\u002Fpdf?id=XLfdzwNKzch)\n\n### 框架\n+ 2019 | Auptimizer——一个可扩展的开源超参数调优框架 | Jiayi Liu 等 | IEEE Big Data | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1911.02522)\n+ 2019 | 向模块化和可编程的架构搜索迈进 | Renato Negrinho 等 | NeurIPS | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1909.13404.pdf)\n+ 2019 | 面向深度学习的进化神经 AutoML | Jason Liang 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F1902.06827)\n+ 2017 | ATM：一个分布式、协作式、可扩展的自动化机器学习系统 | T. Swearingen 等 | IEEE | [`PDF`](https:\u002F\u002Fcyphe.rs\u002Fstatic\u002Fatm.pdf)\n+ 2017 | Google Vizier：黑盒优化服务 | Daniel Golovin 等 | KDD | [`PDF`](https:\u002F\u002Fstatic.googleusercontent.com\u002Fmedia\u002Fresearch.google.com\u002Fzh-CN\u002F\u002Fpubs\u002Farchive\u002F46180.pdf)\n+ 2015 | AutoCompete：机器学习竞赛框架 | Abhishek Thakur 等 | ICML | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1507.02188.pdf)\n\n### 超参数优化\n+ #### 贝叶斯优化\n  - 2020 | 风险度量的贝叶斯优化 | NeurIPS | [`PDF`](https:\u002F\u002Fproceedings.neurips.cc\u002Fpaper\u002F2020\u002Ffile\u002Fe8f2779682fd11fa2067beffc27a9192-Paper.pdf)\n  - 2020 | BOTORCH：高效的蒙特卡洛贝叶斯优化框架 | NeurIPS | [`PDF`](https:\u002F\u002Fproceedings.neurips.cc\u002Fpaper\u002F2020\u002Ffile\u002Ff5b1b89d98b7286673128a5fb112cb9a-Paper.pdf)\n  - 2020 | 无需研究生即可调优超参数：基于Dragonfly的可扩展且鲁棒的贝叶斯优化 | JMLR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1903.06694.pdf)\n  - 2019 | 搜索空间未知时的贝叶斯优化 | NeurIPS | [`PDF`](http:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F9350-bayesian-optimization-with-unknown-search-space.pdf)\n  - 2019 | 带有噪声实验的约束贝叶斯优化 | [`PDF`](https:\u002F\u002Fprojecteuclid.org\u002Fdownload\u002Fpdfview_1\u002Feuclid.ba\u002F1533866666)\n  - 2019 | 学习贝叶斯优化的搜索空间：超参数迁移学习的另一种视角 | NeurIPS | [`PDF`](http:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F9438-learning-search-spaces-for-bayesian-optimization-another-view-of-hyperparameter-transfer-learning.pdf)\n  - 2019 | 实用的两步前瞻贝叶斯优化 | NeurIPS | [`PDF`](http:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F9174-practical-two-step-lookahead-bayesian-optimization.pdf)\n  - 2019 | 带有约束的多目标贝叶斯优化中的预测熵搜索 | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1609.01051.pdf)\n  - 2018 | BOCK：基于圆柱核的贝叶斯优化 | ICML | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1806.01619.pdf)\n  - 2018 | 利用加性与求积傅里叶特征的高效高维贝叶斯优化 | Mojmír Mutný 等 | NeurIPS | 
[`PDF`](https:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F8115-efficient-high-dimensional-bayesian-optimization-with-additivity-and-quadrature-fourier-features.pdf)\n  - 2018 | 通过具有重叠分组的加性模型进行高维贝叶斯优化 | PMLR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1802.07028v2.pdf)\n  - 2018 | 贝叶斯优化中获取函数的最大化 | NeurIPS | [`PDF`](http:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F8194-maximizing-acquisition-functions-for-bayesian-optimization.pdf)\n  - 2018 | 可扩展的超参数迁移学习 | NeurIPS | [`PDF`](http:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F7917-scalable-hyperparameter-transfer-learning.pdf)\n  - 2016 | 基于鲁棒贝叶斯神经网络的贝叶斯优化 | Jost Tobias Springenberg 等 | NIPS | [`PDF`](https:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F6117-bayesian-optimization-with-robust-bayesian-neural-networks.pdf)\n  - 2016 | 基于高斯过程专家乘积的可扩展超参数优化 | Nicolas Schilling 等 | PKDD | [`PDF`](https:\u002F\u002Flink.springer.com\u002Fchapter\u002F10.1007\u002F978-3-319-46128-1_3)\n  - 2016 | 让人类脱离循环：贝叶斯优化综述 | Bobak Shahriari 等 | IEEE | [`PDF`](http:\u002F\u002Fieeexplore.ieee.org\u002Fdocument\u002F7352306\u002F)\n  - 2016 | 向自动调优的神经网络迈进 | Hector Mendoza 等 | JMLR | [`PDF`](http:\u002F\u002Faad.informatik.uni-freiburg.de\u002Fpapers\u002F16-AUTOML-AutoNet.pdf)\n  - 2016 | 用于自动超参数优化的两阶段迁移代理模型 | Martin Wistuba 等 | PKDD | [`PDF`](https:\u002F\u002Flink.springer.com\u002Fchapter\u002F10.1007\u002F978-3-319-46128-1_13)\n  - 2015 | 高效且稳健的自动化机器学习 | [`PDF`](https:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F5872-efficient-and-robust-automated-machine-learning.pdf)\n  - 2015 | 基于因子化多层感知机的超参数优化 | Nicolas Schilling 等 | PKDD | [`PDF`](https:\u002F\u002Flink.springer.com\u002Fchapter\u002F10.1007\u002F978-3-319-23525-7_6)\n  - 2015 | 超参数搜索空间剪枝——序列模型为基础的超参数优化的新组件 | Martin Wistuba 等 | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fcitation.cfm?id=2991491)\n  - 2015 | 使用因子化多层感知机进行联合模型选择和超参数优化 | Nicolas Schilling 等 | ICTAI | [`PDF`](http:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F7372120\u002F)\n  - 2015 | 学习超参数优化的初始化 | Martin Wistuba 等 | DSAA | [`PDF`](http:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F7344817\u002F)\n  - 2015 | 使用深度神经网络进行可扩展的贝叶斯优化 | Jasper Snoek 等 | ACM | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fcitation.cfm?id=3045349)\n  - 2015 | 无模型的序列式超参数调优 | Martin Wistuba 等 | ICDM | [`PDF`](http:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F7373431\u002F)\n  - 2013 | Auto-WEKA：分类算法的组合选择与超参数优化 | [`PDF`](http:\u002F\u002Fwww.cs.ubc.ca\u002Flabs\u002Fbeta\u002FProjects\u002Fautoweka\u002Fpapers\u002Fautoweka.pdf)\n  - 2013 | 将模型搜索科学化：针对视觉架构的数百维超参数优化 | J. Bergstra | JMLR | [`PDF`](http:\u002F\u002Fproceedings.mlr.press\u002Fv28\u002Fbergstra13.pdf)\n  - 2012 | 机器学习算法的实用贝叶斯优化 | [`PDF`](https:\u002F\u002Fpapers.nips.cc\u002Fpaper\u002F4522-practical-bayesian-optimization-of-machine-learning-algorithms.pdf)\n  - 2011 | 基于序列模型的通用算法配置优化（扩展版） | [`PDF`](https:\u002F\u002Fwww.cs.ubc.ca\u002F~hutter\u002Fpapers\u002F10-TR-SMAC.pdf)\n+ #### 进化算法\n  - 2020 | Delta-STN：利用结构化响应雅可比矩阵实现神经网络的高效双层优化 | Juhan Bae、Roger Grosse | Neurips | [`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F2010.13514)\n  - 2018 | Autostacker：一种组合式进化学习系统 | Boyuan Chen 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1803.00684.pdf)\n  - 2017 | 大规模图像分类器的进化 | Esteban Real 等 | PMLR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1703.01041.pdf)\n  - 2016 | 通过基于树的管道优化自动化生物医学数据科学 | Randal S. Olson 等 | ECAL | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1601.07925.pdf)\n  - 2016 | 对用于自动化数据科学的基于树的管道优化工具的评估 | Randal S. 
Olson 等 | GECCO | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fcitation.cfm?id=2908918)\n+ #### 李普希茨函数\n  - 2017 | 李普希茨函数的全局优化 | Cédric Malherbe、Nicolas Vayatis | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1703.02628.pdf)\n+ #### 局部搜索\n  - 2009 | ParamILS：自动算法配置框架 | Frank Hutter 等 | JAIR | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1401.3492.pdf)\n+ #### 元学习\n  - 2019 | OBOE：面向AutoML模型选择的协同过滤 | Chengrun Yang 等 | KDD | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002Fpdf\u002F10.1145\u002F3292500.3330909)\n  - 2019 | SMARTML：基于元学习的机器学习算法自动选择与超参数调优框架 | Mohamed Maher、Sherif Sakr | EDBT | [`PDF`](http:\u002F\u002Fopenproceedings.org\u002F2019\u002Fconf\u002Fedbt\u002FEDBT19_paper_235.pdf)\n  - 2008 | 关于算法选择中元学习的跨学科视角 | Kate Smith-Miles | ACM Computing Surveys | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fcitation.cfm?id=1456656)\n+ #### 粒子群优化\n  - 2017 | 粒子群优化在深度神经网络超参数选择中的应用 | Pablo Ribalta Lorenzo 等 | GECCO | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fcitation.cfm?id=3071208)\n  - 2008 | 粒子群优化在支持向量机参数确定与特征选择中的应用 | Shih-Wei Lin 等 | Expert Systems with Applications | [`PDF`](http:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fpii\u002FS0957417407003752)\n+ #### 随机搜索\n  - 2016 | Hyperband：一种新颖的基于赌博机的超参数优化方法 | Lisha Li 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1603.06560.pdf)\n  - 2012 | 超参数优化中的随机搜索 | James Bergstra、Yoshua Bengio | JMLR | [`PDF`](http:\u002F\u002Fwww.jmlr.org\u002Fpapers\u002Fvolume13\u002Fbergstra12a\u002Fbergstra12a.pdf)\n  - 2011 | 超参数优化的算法 | James Bergstra 等 | NIPS | [`PDF`](https:\u002F\u002Fdl.acm.org\u002Fcitation.cfm?id=2986743)\n+ #### 迁移学习\n  - 2016 | 用于自动超参数调优的高效迁移学习方法 | Dani Yogatama、Gideon Mann | JMLR | [`PDF`](https:\u002F\u002Fpdfs.semanticscholar.org\u002F75f2\u002F6734972ebaffc6b43d45abd3048ef75f15a5.pdf)\n  - 2016 | 贝叶斯优化的灵活迁移学习框架 | Tinu Theckel Joy 等 | PAKDD | [`PDF`](https:\u002F\u002Flink.springer.com\u002Fchapter\u002F10.1007\u002F978-3-319-31753-3_9)\n  - 2016 | 超参数优化机器 | Martin Wistuba 等 | DSAA | [`PDF`](http:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F7796889\u002F)\n  - 2013 | 协作式超参数调优 | Rémi Bardenet 等 | ICML | [`PDF`](http:\u002F\u002Fproceedings.mlr.press\u002Fv28\u002Fbardenet13.pdf)\n\n### 杂项\n- 2020 | 面向数据流的自动化机器学习技术 | Alexandru-Ionut Imbrea | [`PDF`](https:\u002F\u002Fgithub.com\u002FAlexImb\u002Fautoml-streams-research-paper\u002Fraw\u002Fmaster\u002FA_Imbrea_AutoML_Data_Streams.pdf)\n- 2020 | 让人类重新回到AutoML循环中 | Iordanis Xanthopoulos 等 | EDBT\u002FICDT | [`PDF`](http:\u002F\u002Fceur-ws.org\u002FVol-2578\u002FETMLP5.pdf)\n- 2018 | 利用性能预测加速神经架构搜索 | Bowen Baker 等 | ICLR | [`PDF`](https:\u002F\u002Fopenreview.net\u002Fpdf?id=BJypUGZ0Z)\n- 2018 | 分类数据集的特征刻画：元学习中的元特征研究 | Adriano Rivolli 等 | arXiv | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1808.10406.pdf)\n- 2017 | 自动“弗兰肯斯坦”：自主创建复杂集成模型 | Martin Wistuba 等 | SIAM | [`PDF`](http:\u002F\u002Fepubs.siam.org\u002Fdoi\u002Fpdf\u002F10.1137\u002F1.9781611974973.83)\n\n# 教程\n### 贝叶斯优化\n+ 2018 | 贝叶斯优化教程 | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1807.02811.pdf)\n+ 2010 | 关于昂贵目标函数的贝叶斯优化教程，及其在主动用户建模和层次强化学习中的应用 | [`PDF`](https:\u002F\u002Farxiv.org\u002Fpdf\u002F1012.2599v1.pdf)\n### 元学习\n+ 2008 | 元学习——教程 | [`PDF`](https:\u002F\u002Fpdfs.semanticscholar.org\u002F5794\u002F1a4891f673cadf06fba02419372aad85c3bb.pdf)\n\n# 博客\n| 类型       | 博客标题 | 链接       |\n| :--------: | :--------: | :--------: |\n| HPO        | 贝叶斯优化用于超参数调优 | [`链接`](https:\u002F\u002Farimo.com\u002Fdata-science\u002F2016\u002Fbayesian-optimization-hyperparameter-tuning\u002F) |\n| 元学习     | 学会学习 | 
[`链接`](http:\u002F\u002Fbair.berkeley.edu\u002Fblog\u002F2017\u002F07\u002F18\u002Flearning-to-learn\u002F) |\n| 元学习     | 为什么元学习对人工智能的进一步发展至关重要？ | [`链接`](https:\u002F\u002Fchatbotslife.com\u002Fwhy-meta-learning-is-crucial-for-further-advances-of-artificial-intelligence-c2df55959adf) |\n\n# 书籍\n| 出版年份     | 类型    | 书名 | 作者    | 出版社  | 链接       |\n| :--------: | :--------: | :--------: | :--------: | :--------: | :--------: |\n| 2009年       | 元学习 | 元学习——数据挖掘的应用 | Brazdil, P., Giraud Carrier, C., Soares, C., Vilalta, R. | Springer | [`下载`](http:\u002F\u002Fwww.springer.com\u002Fla\u002Fbook\u002F9783540732624) |\n| 2019年       | HPO、元学习、NAS | AutoML：方法、系统、挑战 | Frank Hutter, Lars Kotthoff, Joaquin Vanschoren | Springer | [`下载`](https:\u002F\u002Fwww.automl.org\u002Fbook\u002F) |\n| 2021年       | AutoML | 自动化机器学习实战 | Qingquan Song, Haifeng Jin, Xia Hu | Manning Publications | [`下载`](https:\u002F\u002Fwww.manning.com\u002Fbooks\u002Fautomated-machine-learning-in-action) |\n\n# 视频\n| 标题    | 作者    | 链接     |\n| :--------: | :--------: | :--------: |\n| AutoML基础：自动化机器学习实战 | Qingquan Song, Haifeng Jin, Xia Hu | [`链接`](https:\u002F\u002Fwww.youtube.com\u002Fwatch?v=9KpieG0B7VM) |\n\n# 项目\n| 项目       | 类型       | 编程语言   | 许可证     | 链接       |\n| :--------: | :--------: | :--------: | :--------: | :--------: |\n| AdaNet     | NAS        | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Ftensorflow\u002Fadanet) |\n| Advisor    | HPO        | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Ftobegit3hub\u002Fadvisor) |\n| AMLA       | HPO, NAS   | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002FCiscoAI\u002Famla) |\n| ATM        | HPO        | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002FHDI-Project\u002FATM) |\n| Auger      | HPO        | Python     | 商业许可   | [`官网`](https:\u002F\u002Fauger.ai) |\n| auptimizer | HPO, NAS   | Python（支持R脚本） | GPL-3.0  | [`Github`](https:\u002F\u002Fgithub.com\u002FLGE-ARC-AdvancedAI\u002Fauptimizer) |\n| Auto-Keras | NAS        | Python     | [`许可证`](https:\u002F\u002Fgithub.com\u002Fkeras-team\u002Fautokeras\u002Fblob\u002Fmaster\u002FLICENSE.txt) | [`Github`](https:\u002F\u002Fgithub.com\u002Fkeras-team\u002Fautokeras) |\n| AutoML Vision | NAS     | Python     | 商业许可   | [`官网`](https:\u002F\u002Fcloud.google.com\u002Fvision\u002F) |\n| AutoML Video Intelligence | NAS        | Python    | 商业许可   | [`官网`](https:\u002F\u002Fcloud.google.com\u002Fvideo-intelligence\u002F) |\n| AutoML Natural Language | NAS        | Python     | 商业许可   | [`官网`](https:\u002F\u002Fcloud.google.com\u002Fnatural-language\u002F) |\n| AutoML Translation      | NAS        | Python     | 商业许可   | [`官网`](https:\u002F\u002Fcloud.google.com\u002Ftranslate\u002F) |\n| AutoML Tables           | AutoFE, HPO | Python    | 商业许可   | [`官网`](https:\u002F\u002Fcloud.google.com\u002Fautoml-tables\u002F) |\n| AutoPyTorch | HPO, NAS   | Python    | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Fautoml\u002FAuto-PyTorch) |\n| HyperGBM                | HPO | Python    | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002FDataCanvasIO\u002FHyperGBM\u002F) |\n| HyperKeras              | NAS        | Python    | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002FDataCanvasIO\u002FHyperKeras\u002F) |\n| Hypernets               | HPO, NAS   | Python    | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002FDataCanvasIO\u002FHypernets\u002F) |\n| auto-sklearn | HPO      | Python     | 
[`许可证`](https:\u002F\u002Fgithub.com\u002Fautoml\u002Fauto-sklearn\u002Fblob\u002Fmaster\u002FLICENSE.txt) | [`Github`](https:\u002F\u002Fgithub.com\u002Fautoml\u002Fauto-sklearn) |\n| auto_ml     | HPO       | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002FClimbsRocks\u002Fauto_ml) |\n| BayesianOptimization | HPO | Python  | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002Ffmfn\u002FBayesianOptimization) |\n| BayesOpt    | HPO       | C++        | AGPL-3.0   | [`Github`](https:\u002F\u002Fgithub.com\u002Frmcantin\u002Fbayesopt) |\n| comet       | HPO       | Python     | 商业许可   | [`官网`](https:\u002F\u002Fwww.comet.ml) |\n| DataRobot   | HPO       | Python     | 商业许可   | [`官网`](https:\u002F\u002Fwww.datarobot.com\u002F) |\n| DEvol       | NAS       | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002Fjoeddav\u002Fdevol) |\n| DeepArchitect | NAS     | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002Fnegrinho\u002Fdeep_architect) |\n| Determined | HPO, NAS   | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Fdetermined-ai\u002Fdetermined) |\n| Driverless AI | AutoFE  | Python     | 商业许可   | [`官网`](https:\u002F\u002Fwww.h2o.ai\u002Fproducts\u002Fh2o-driverless-ai\u002F) |\n| FAR-HO      | HPO       | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002Flucfra\u002FFAR-HO) |\n| H2O AutoML  | HPO       | Python, R, Java, Scala | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Fh2oai\u002Fh2o-3\u002F) |\n| HpBandSter  | HPO       | Python     | BSD-3-Clause | [`Github`](https:\u002F\u002Fgithub.com\u002Fautoml\u002FHpBandSter) |\n| HyperBand   | HPO       | Python     | [`许可证`](https:\u002F\u002Fgithub.com\u002Fzygmuntz\u002Fhyperband\u002Fblob\u002Fmaster\u002FLICENSE) | [`Github`](https:\u002F\u002Fgithub.com\u002Fzygmuntz\u002Fhyperband) |\n| Hyperopt    | HPO       | Python     | [`许可证`](https:\u002F\u002Fgithub.com\u002Fhyperopt\u002Fhyperopt\u002Fblob\u002Fmaster\u002FLICENSE.txt) | [`Github`](https:\u002F\u002Fgithub.com\u002Fhyperopt\u002Fhyperopt) |\n| Hyperopt-sklearn | HPO  | Python    | [`许可证`](https:\u002F\u002Fgithub.com\u002Fhyperopt\u002Fhyperopt-sklearn\u002Fblob\u002Fmaster\u002FLICENSE.txt) | [`Github`](https:\u002F\u002Fgithub.com\u002Fhyperopt\u002Fhyperopt-sklearn) |\n| Hyperparameter Hunter | HPO | Python | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002FHunterMcGushion\u002Fhyperparameter_hunter) |\n| Katib       | HPO       | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Fkubeflow\u002Fkatib) |\n| MateLabs    | HPO       | Python     | 商业许可   | [`官网`](http:\u002F\u002Fmatelabs.in\u002F) |\n| Milano      | HPO       | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002FNVIDIA\u002FMilano) |\n| MLJAR       | AutoFE, HPO, NAS       | Python     | MIT | [`Github`](https:\u002F\u002Fgithub.com\u002Fmljar\u002Fmljar-supervised) |\n| mlr3automl       | HPO       | R     | LGPL-3.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Fa-hanf\u002Fmlr3automl) |\n| nasbot      | NAS       | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002Fkirthevasank\u002Fnasbot) |\n| neptune     | HPO       | Python     | 商业许可   | [`官网`](https:\u002F\u002Fneptune.ml\u002F) |\n| NNI         | HPO, NAS  | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002FMicrosoft\u002Fnni) |\n| Oboe    | HPO       | Python     | BSD-3-Clause | 
[`Github`](https:\u002F\u002Fgithub.com\u002Fudellgroup\u002Foboe) |\n| Optunity    | HPO       | Python     | [`许可证`](https:\u002F\u002Fgithub.com\u002Fclaesenm\u002Foptunity\u002Fblob\u002Fmaster\u002FLICENSE.txt) | [`Github`](https:\u002F\u002Fgithub.com\u002Fclaesenm\u002Foptunity) |\n| R2.ai       | HPO       |            | 商业许可   | [`官网`](https:\u002F\u002Fr2.ai\u002F) |\n| RBFOpt      | HPO       | Python     | [`许可证`](https:\u002F\u002Fgithub.com\u002Fcoin-or\u002Frbfopt\u002Fblob\u002Fmaster\u002FLICENSE) | [`Github`](https:\u002F\u002Fgithub.com\u002Fcoin-or\u002Frbfopt) |\n| RoBO        | HPO       | Python     | BSD-3-Clause | [`Github`](https:\u002F\u002Fgithub.com\u002Fautoml\u002FRoBO) |\n| Scikit-Optimize | HPO   | Python     | [`许可证`](https:\u002F\u002Fgithub.com\u002Fscikit-optimize\u002Fscikit-optimize\u002Fblob\u002Fmaster\u002FLICENSE.md) | [`Github`](https:\u002F\u002Fgithub.com\u002Fscikit-optimize\u002Fscikit-optimize) |\n| SigOpt      | HPO       | Python     | 商业许可   | [`官网`](https:\u002F\u002Fsigopt.com\u002F) |\n| SMAC3       | HPO       | Python     | [`许可证`](https:\u002F\u002Fgithub.com\u002Fautoml\u002FSMAC3\u002Fblob\u002Fmaster\u002FLICENSE) | [`Github`](https:\u002F\u002Fgithub.com\u002Fautoml\u002FSMAC3) |\n| TPOT        | AutoFE, HPO | Python   | LGPL-3.0   | [`Github`](https:\u002F\u002Fgithub.com\u002Frhiever\u002Ftpot) |\n| TransmogrifAI | HPO     | Scala      | BSD-3-Clause | [`Github`](https:\u002F\u002Fgithub.com\u002Fsalesforce\u002FTransmogrifAI) |\n| Tune        | HPO       | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Fray-project\u002Fray\u002Ftree\u002Fmaster\u002Fpython\u002Fray\u002Ftune) |\n| Xcessiv     | HPO       | Python     | Apache-2.0 | [`Github`](https:\u002F\u002Fgithub.com\u002Freiinakano\u002Fxcessiv) |\n| SmartML     | HPO       | R          | GPL-3.0    | [`Github`](https:\u002F\u002Fgithub.com\u002FDataSystemsGroupUT\u002FSmartML) |\n| MLBox     | AutoFE, HPO    | Python          | BSD-3-Clause    | [`Github`](https:\u002F\u002Fgithub.com\u002FAxeldeRomblay\u002FMLBox) |\n| AutoAI Watson     | AutoFE, HPO    |      | 商业许可    | [`官网`](https:\u002F\u002Fwww.ibm.com\u002Fcloud\u002Fwatson-studio\u002Fautoai) |\n| AutoML    | AutoML | Python     | MIT    | [`Github`](https:\u002F\u002Fgithub.com\u002FWestern-OC2-Lab\u002FAutoML-Implementation-for-Static-and-Dynamic-Data-Analytics) |\n| Optuna      | HPO       | Python     | MIT        | [`Github`](https:\u002F\u002Fgithub.com\u002Foptuna\u002Foptuna) |
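\n\n表中多数 HPO 工具的使用方式类似：先定义一个目标函数，再交由优化器反复试验超参数。下面以表中列出的 Optuna 为例给出一个最小示意（仅为演示，并非上文任何论文的方法；假设已通过 pip install optuna 安装，目标函数为简单的二次函数）：\n\n```python\nimport optuna\n\n# 目标函数：优化器会反复调用它，每次传入一组候选超参数\ndef objective(trial):\n    x = trial.suggest_float(\"x\", -10.0, 10.0)  # 在 [-10, 10] 范围内采样超参数 x\n    return (x - 2) ** 2  # 返回值越小越好\n\n# 默认采用 TPE（一种贝叶斯优化方法）执行 50 次试验\nstudy = optuna.create_study(direction=\"minimize\")\nstudy.optimize(objective, n_trials=50)\nprint(study.best_params)  # 预期接近 {\"x\": 2.0}\n```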
\n\n# 幻灯片\n| 类型    | 幻灯片标题 | 作者    | 链接       |\n| :--------: | :--------: | :--------: | :--------: |\n| AutoFE     | 预测建模的自动化特征工程 | Udyan Khurana 等 | [`下载`](https:\u002F\u002Fgithub.com\u002Fhibayesian\u002Fawesome-automl-papers\u002Fblob\u002Fmaster\u002Fresources\u002Fslides\u002F%5Bslides%5D-automated-feature-engineering-for-predictive-modeling.pdf) |\n| HPO     | 机器学习中的贝叶斯优化教程 | Ryan P. Adams | [`下载`](https:\u002F\u002Fgithub.com\u002Fhibayesian\u002Fawesome-automl-papers\u002Fblob\u002Fmaster\u002Fresources\u002Fslides\u002F%5Bslides%5D-a-tutorial-on-bayesian-optimization-for-machine-learning.pdf) |\n| HPO     | 贝叶斯优化 | Gilles Louppe | [`下载`](https:\u002F\u002Fgithub.com\u002Fhibayesian\u002Fawesome-automl-papers\u002Fblob\u002Fmaster\u002Fresources\u002Fslides\u002F%5Bslides%5D-bayesian-optimisation.pdf) |\n\n# 致谢\n特别感谢所有为本项目做出贡献的人。\n\n| 姓名       | 简介        |\n| :--------: | :--------: |\n| [Alexander Robles](https:\u002F\u002Fgithub.com\u002FAlro10) | 巴西坎皮纳斯大学博士生 |\n| [derekflint](https:\u002F\u002Fgithub.com\u002Fderekflint) | |\n| [endymecy](https:\u002F\u002Fgithub.com\u002Fendymecy) | 腾讯高级研究员 |\n| [Eric](https:\u002F\u002Fgithub.com\u002FffengENG) | |\n| [Erin LeDell](https:\u002F\u002Fgithub.com\u002Fledell) | H2O.ai 首席机器学习科学家 |\n| [fwcore](https:\u002F\u002Fgithub.com\u002Ffwcore) | |\n| [Gaurav Mittal](https:\u002F\u002Fgithub.com\u002Fg1910) | |\n| [Hernan Ceferino Vazquez](https:\u002F\u002Fgithub.com\u002Fhcvazquez) | 博士，MercadoLibre 数据科学专家 |\n| [Kaustubh Damania](https:\u002F\u002Fgithub.com\u002FKaustubhDamania) | |\n| [Lilian Besson](https:\u002F\u002Fgithub.com\u002Fnaereen) | CentraleSupélec 博士生 |\n| [罗磊](https:\u002F\u002Fgithub.com\u002Fluoleicn) | |\n| [Marc](https:\u002F\u002Fgithub.com\u002FEnnosigaeon) | |\n| [Mohamed Maher](https:\u002F\u002Fgithub.com\u002Fmmaher22) | |\n| [Neil Conway](https:\u002F\u002Fgithub.com\u002Fneilconway) | Determined AI 首席技术官 |\n| [Richard Liaw](https:\u002F\u002Fgithub.com\u002Frichardliaw) | 加州大学伯克利分校博士生 |\n| [Randy Olson](https:\u002F\u002Fgithub.com\u002Frhiever) | LifeEGX 首席数据科学家 |\n| [Slava Kurilyak](https:\u002F\u002Fgithub.com\u002Fslavakurilyak) | Produvia 创始人兼首席执行官 |\n| [Saket Maheshwary](https:\u002F\u002Fgithub.com\u002Fsaket-maheshwary) | 人工智能研究员 |\n| [shaido987](https:\u002F\u002Fgithub.com\u002Fshaido987) | |\n| [sophia-wright-blue](https:\u002F\u002Fgithub.com\u002Fsophia-wright-blue) | |\n| [tengben0905](https:\u002F\u002Fgithub.com\u002Ftengben0905) | |\n| [xuehui](https:\u002F\u002Fgithub.com\u002Fxuehui1991) | 微软员工 |\n| [Yihui He](https:\u002F\u002Fgithub.com\u002Fyihui-he) | 卡内基梅隆大学研究生 |\n\n# 联系与反馈\n如果您有任何建议（例如缺少的论文、新论文、关键研究人员或错别字），欢迎提交 Pull Request。您也可以发送邮件至：\n+ Mark Lin (hibayesian@gmail.com)。\n\n# 许可证\nAwesome-AutoML-Papers 采用 Apache License 2.0 许可证。","# Awesome-AutoML-Papers 快速上手指南\n\n`awesome-automl-papers` 并非一个可安装的软件库或框架，而是一个**精选的自动化机器学习（AutoML）学术资源清单**。它汇集了该领域的论文、教程、幻灯片和项目链接。因此，本指南旨在指导开发者如何高效地浏览、检索和利用这些资源，而非执行传统的软件安装流程。\n\n## 环境准备\n\n由于本项目本质是一个文档仓库，对环境要求极低：\n*   **系统要求**：任何支持现代浏览器的操作系统（Windows, macOS, Linux）。\n*   **前置依赖**：\n    *   Web 浏览器（推荐 Chrome, Edge 或 Firefox）。\n    *   （可选）Git：如果你希望将仓库克隆到本地进行离线阅读或贡献。\n    *   （可选）PDF 阅读器：用于查看链接中的学术论文。\n\n## 获取资源\n\n你可以通过以下两种方式访问资源列表：\n\n### 方式一：在线浏览（推荐）\n直接访问 GitHub 仓库页面，利用浏览器的搜索功能查找感兴趣的主题。\n*   **仓库地址**: `https:\u002F\u002Fgithub.com\u002Fhibayesian\u002Fawesome-automl-papers`\n*   **国内加速**: 如果访问 GitHub 较慢，可使用国内镜像站（如 Gitee 上的同步仓库）或配合加速工具访问。\n\n### 方式二：本地克隆\n如果你希望离线阅读或跟踪更新，可以使用 Git 克隆仓库：\n\n```bash\ngit clone https:\u002F\u002Fgithub.com\u002Fhibayesian\u002Fawesome-automl-papers.git\ncd awesome-automl-papers\n```\n\n## 基本使用\n\n本项目的核心用法是通过目录结构定位所需的 AutoML 技术文献。以下是使用示例：
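\n\n如果你已按“方式二”将仓库克隆到本地，也可以用几行 Python 脚本批量提取 README 中的论文 PDF 链接，便于后续下载或整理（以下只是一个最小示意，假设当前目录为仓库根目录，且条目沿用列表中 [`PDF`](链接) 的书写格式）：\n\n```python\nfrom pathlib import Path\n\n# 读取仓库根目录下的 README 全文\ntext = Path(\"README.md\").read_text(encoding=\"utf-8\")\n\n# 逐行筛选包含 PDF 链接的条目，并截取括号内的 URL\nfor line in text.splitlines():\n    if \"[`PDF`](\" in line:\n        url = line.split(\"[`PDF`](\", 1)[1].split(\")\", 1)[0]\n        print(url)\n```\n\n### 1. 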
按技术领域查找\n打开 `README.md` 文件（或在 GitHub 网页端查看），利用 **Table of Contents** 快速跳转。项目主要涵盖以下核心领域：\n\n*   **自动特征工程 (Automated Feature Engineering)**: 查找关于 `Expand Reduce`, `Meta Learning`, `Reinforcement Learning` 在特征生成中的应用论文。\n*   **神经架构搜索 (Neural Architecture Search, NAS)**: 查阅基于 `Evolutionary Algorithms` (进化算法), `Reinforcement Learning` (强化学习), 或 `DARTS` (可微分架构搜索) 的最新研究。\n*   **超参数优化 (Hyperparameter Optimization, HPO)**: 获取关于 `Bayesian Optimization` (贝叶斯优化), `Random Search` 等算法的理论与实践论文。\n*   **综述文章 (Surveys)**: 新手建议优先阅读 `Surveys` 章节下的论文（如 2019-2024 年的综述），以建立对 AutoML 全局的认识。\n\n### 2. 获取论文全文\n在列表中找到感兴趣的条目后，点击对应的链接获取资源：\n*   **[`PDF`](link)**: 直接下载论文 PDF 文件（通常指向 arXiv 或会议官网）。\n*   **[`Springer`](link)** \u002F **[`IEEE`](link)**: 跳转至出版商页面（可能需要机构权限）。\n*   **[`GitHub`](link)**: 跳转至论文对应的开源代码实现项目。\n\n### 3. 实战结合示例\n假设你想了解 **DARTS (Differentiable Architecture Search)** 算法：\n1.  在目录中找到 **Architecture Search** -> **Continuous Optimization**。\n2.  定位到条目：`2019 | DARTS: Differentiable Architecture Search | Hanxiao Liu, et al. | ICLR`。\n3.  点击 **[`PDF`](https:\u002F\u002Farxiv.org\u002Fabs\u002F1806.09055)** 阅读原始论文。\n4.  通常在论文摘要或相关项目列表中会附带代码链接，可进一步寻找其开源实现进行复现。\n\n### 4. 关注商业与框架对比\n参考 README 中的表格，对比各大公司（如 Google, Microsoft, 百度，阿里等）在 **AutoFE** (自动特征工程), **HPO**, **NAS** 方面的支持情况，从而选择适合你项目需求的现有 AutoML 框架（如 Auto-Keras, Google Vizier 等）进行深入调研。","某电商初创公司的数据分析师团队正试图构建一个高精度的用户流失预测模型，但团队中缺乏深厚的机器学习算法专家。\n\n### 没有 awesome-automl-papers 时\n- **文献检索如大海捞针**：团队成员需在 Google Scholar 和 arXiv 上手动搜索海量关键词，难以区分过时的方法与最新的 SOTA（最先进）技术，耗费大量时间在无效阅读上。\n- **技术选型盲目试错**：面对自动特征工程、超参数优化和神经架构搜索等众多子领域，团队不清楚哪些组合最适合当前的表格数据场景，导致模型迭代方向混乱。\n- **复现成本高昂**：找到的论文往往缺少开源代码或详细教程，团队需从零推导公式并编写底层代码，原本两周的验证周期被拉长至两个月。\n- **视野局限错失良机**：由于无法系统性地跟踪前沿进展，团队可能还在使用三年前的旧算法，而错过了能显著提升准确率的最新元学习策略。\n\n### 使用 awesome-automl-papers 后\n- **一站式获取精选资源**：直接查阅该仓库中分类整理好的综述、论文和项目链接，迅速锁定针对“表格数据自动建模”的最新核心文献，检索效率提升十倍。\n- **清晰的技术路线图**：通过仓库对 AutoFE、HPO、NAS 等模块的结构化梳理，团队快速确定了“自动特征工程 + 贝叶斯超参数优化”的最佳实践路径。\n- **加速落地与复现**：利用列表中附带的教程、幻灯片及开源项目地址，团队直接复用成熟的代码框架进行微调，将模型验证周期压缩回三天内。\n- **同步行业最前沿**：借助社区持续更新的机制，团队能即时掌握各大科技公司（如 Google、Microsoft）的最新动向，确保技术方案始终处于行业领先水平。\n\nawesome-automl-papers 将分散的学术成果转化为可执行的技术地图，让非专家团队也能以最低成本驾驭自动化机器学习的强大能力。","https:\u002F\u002Foss.gittoolsai.com\u002Fimages\u002Fhibayesian_awesome-automl-papers_1ab9bcac.png","hibayesian","Mark Lin","https:\u002F\u002Foss.gittoolsai.com\u002Favatars\u002Fhibayesian_b5d5e986.jpg","Applied Algorithm Expert",null,"Shanghai, China","hibayesian@gmail.com","https:\u002F\u002Fhibayesian.github.io\u002F","https:\u002F\u002Fgithub.com\u002Fhibayesian",4150,682,"2026-04-15T10:35:55","Apache-2.0",1,"","未说明",{"notes":89,"python":87,"dependencies":90},"该项目是一个自动机器学习（AutoML）领域的论文、文章、教程和项目清单仓库，不包含可执行的源代码或软件工具，因此没有具体的运行环境、依赖库或硬件需求。用户只需通过浏览器阅读列表中的链接即可。",[],[14],[93,94,95,96],"automl","automated-feature-engineering","hyperparameter-optimization","neural-architecture-search","2026-03-27T02:49:30.150509","2026-04-19T15:40:01.741600",[],[]]