[{"data":1,"prerenderedAt":-1},["ShallowReactive",2],{"similar-qingsongedu--time-series-transformers-review":3,"tool-qingsongedu--time-series-transformers-review":61},[4,18,26,36,44,53],{"id":5,"name":6,"github_repo":7,"description_zh":8,"stars":9,"difficulty_score":10,"last_commit_at":11,"category_tags":12,"status":17},4358,"openclaw","openclaw\u002Fopenclaw","OpenClaw 是一款专为个人打造的本地化 AI 助手，旨在让你在自己的设备上拥有完全可控的智能伙伴。它打破了传统 AI 助手局限于特定网页或应用的束缚，能够直接接入你日常使用的各类通讯渠道，包括微信、WhatsApp、Telegram、Discord、iMessage 等数十种平台。无论你在哪个聊天软件中发送消息，OpenClaw 都能即时响应，甚至支持在 macOS、iOS 和 Android 设备上进行语音交互，并提供实时的画布渲染功能供你操控。\n\n这款工具主要解决了用户对数据隐私、响应速度以及“始终在线”体验的需求。通过将 AI 部署在本地，用户无需依赖云端服务即可享受快速、私密的智能辅助，真正实现了“你的数据，你做主”。其独特的技术亮点在于强大的网关架构，将控制平面与核心助手分离，确保跨平台通信的流畅性与扩展性。\n\nOpenClaw 非常适合希望构建个性化工作流的技术爱好者、开发者，以及注重隐私保护且不愿被单一生态绑定的普通用户。只要具备基础的终端操作能力（支持 macOS、Linux 及 Windows WSL2），即可通过简单的命令行引导完成部署。如果你渴望拥有一个懂你",349277,3,"2026-04-06T06:32:30",[13,14,15,16],"Agent","开发框架","图像","数据工具","ready",{"id":19,"name":20,"github_repo":21,"description_zh":22,"stars":23,"difficulty_score":10,"last_commit_at":24,"category_tags":25,"status":17},3808,"stable-diffusion-webui","AUTOMATIC1111\u002Fstable-diffusion-webui","stable-diffusion-webui 是一个基于 Gradio 构建的网页版操作界面，旨在让用户能够轻松地在本地运行和使用强大的 Stable Diffusion 图像生成模型。它解决了原始模型依赖命令行、操作门槛高且功能分散的痛点，将复杂的 AI 绘图流程整合进一个直观易用的图形化平台。\n\n无论是希望快速上手的普通创作者、需要精细控制画面细节的设计师，还是想要深入探索模型潜力的开发者与研究人员，都能从中获益。其核心亮点在于极高的功能丰富度：不仅支持文生图、图生图、局部重绘（Inpainting）和外绘（Outpainting）等基础模式，还独创了注意力机制调整、提示词矩阵、负向提示词以及“高清修复”等高级功能。此外，它内置了 GFPGAN 和 CodeFormer 等人脸修复工具，支持多种神经网络放大算法，并允许用户通过插件系统无限扩展能力。即使是显存有限的设备，stable-diffusion-webui 也提供了相应的优化选项，让高质量的 AI 艺术创作变得触手可及。",162132,"2026-04-05T11:01:52",[14,15,13],{"id":27,"name":28,"github_repo":29,"description_zh":30,"stars":31,"difficulty_score":32,"last_commit_at":33,"category_tags":34,"status":17},1381,"everything-claude-code","affaan-m\u002Feverything-claude-code","everything-claude-code 是一套专为 AI 编程助手（如 Claude Code、Codex、Cursor 等）打造的高性能优化系统。它不仅仅是一组配置文件，而是一个经过长期实战打磨的完整框架，旨在解决 AI 代理在实际开发中面临的效率低下、记忆丢失、安全隐患及缺乏持续学习能力等核心痛点。\n\n通过引入技能模块化、直觉增强、记忆持久化机制以及内置的安全扫描功能，everything-claude-code 能显著提升 AI 在复杂任务中的表现，帮助开发者构建更稳定、更智能的生产级 AI 代理。其独特的“研究优先”开发理念和针对 Token 消耗的优化策略，使得模型响应更快、成本更低，同时有效防御潜在的攻击向量。\n\n这套工具特别适合软件开发者、AI 研究人员以及希望深度定制 AI 工作流的技术团队使用。无论您是在构建大型代码库，还是需要 AI 协助进行安全审计与自动化测试，everything-claude-code 都能提供强大的底层支持。作为一个曾荣获 Anthropic 黑客大奖的开源项目，它融合了多语言支持与丰富的实战钩子（hooks），让 AI 真正成长为懂上",160784,2,"2026-04-19T11:32:54",[14,13,35],"语言模型",{"id":37,"name":38,"github_repo":39,"description_zh":40,"stars":41,"difficulty_score":32,"last_commit_at":42,"category_tags":43,"status":17},2271,"ComfyUI","Comfy-Org\u002FComfyUI","ComfyUI 是一款功能强大且高度模块化的视觉 AI 引擎，专为设计和执行复杂的 Stable Diffusion 图像生成流程而打造。它摒弃了传统的代码编写模式，采用直观的节点式流程图界面，让用户通过连接不同的功能模块即可构建个性化的生成管线。\n\n这一设计巧妙解决了高级 AI 绘图工作流配置复杂、灵活性不足的痛点。用户无需具备编程背景，也能自由组合模型、调整参数并实时预览效果，轻松实现从基础文生图到多步骤高清修复等各类复杂任务。ComfyUI 拥有极佳的兼容性，不仅支持 Windows、macOS 和 Linux 全平台，还广泛适配 NVIDIA、AMD、Intel 及苹果 Silicon 等多种硬件架构，并率先支持 SDXL、Flux、SD3 等前沿模型。\n\n无论是希望深入探索算法潜力的研究人员和开发者，还是追求极致创作自由度的设计师与资深 AI 绘画爱好者，ComfyUI 都能提供强大的支持。其独特的模块化架构允许社区不断扩展新功能，使其成为当前最灵活、生态最丰富的开源扩散模型工具之一，帮助用户将创意高效转化为现实。",109154,"2026-04-18T11:18:24",[14,15,13],{"id":45,"name":46,"github_repo":47,"description_zh":48,"stars":49,"difficulty_score":32,"last_commit_at":50,"category_tags":51,"status":17},6121,"gemini-cli","google-gemini\u002Fgemini-cli","gemini-cli 是一款由谷歌推出的开源 AI 命令行工具，它将强大的 Gemini 大模型能力直接集成到用户的终端环境中。对于习惯在命令行工作的开发者而言，它提供了一条从输入提示词到获取模型响应的最短路径，无需切换窗口即可享受智能辅助。\n\n这款工具主要解决了开发过程中频繁上下文切换的痛点，让用户能在熟悉的终端界面内直接完成代码理解、生成、调试以及自动化运维任务。无论是查询大型代码库、根据草图生成应用，还是执行复杂的 Git 操作，gemini-cli 
都能通过自然语言指令高效处理。\n\n它特别适合广大软件工程师、DevOps 人员及技术研究人员使用。其核心亮点包括支持高达 100 万 token 的超长上下文窗口，具备出色的逻辑推理能力；内置 Google 搜索、文件操作及 Shell 命令执行等实用工具；更独特的是，它支持 MCP（模型上下文协议），允许用户灵活扩展自定义集成，连接如图像生成等外部能力。此外，个人谷歌账号即可享受免费的额度支持，且项目基于 Apache 2.0 协议完全开源，是提升终端工作效率的理想助手。",100752,"2026-04-10T01:20:03",[52,13,15,14],"插件",{"id":54,"name":55,"github_repo":56,"description_zh":57,"stars":58,"difficulty_score":32,"last_commit_at":59,"category_tags":60,"status":17},4721,"markitdown","microsoft\u002Fmarkitdown","MarkItDown 是一款由微软 AutoGen 团队打造的轻量级 Python 工具，专为将各类文件高效转换为 Markdown 格式而设计。它支持 PDF、Word、Excel、PPT、图片（含 OCR）、音频（含语音转录）、HTML 乃至 YouTube 链接等多种格式的解析，能够精准提取文档中的标题、列表、表格和链接等关键结构信息。\n\n在人工智能应用日益普及的今天，大语言模型（LLM）虽擅长处理文本，却难以直接读取复杂的二进制办公文档。MarkItDown 恰好解决了这一痛点，它将非结构化或半结构化的文件转化为模型“原生理解”且 Token 效率极高的 Markdown 格式，成为连接本地文件与 AI 分析 pipeline 的理想桥梁。此外，它还提供了 MCP（模型上下文协议）服务器，可无缝集成到 Claude Desktop 等 LLM 应用中。\n\n这款工具特别适合开发者、数据科学家及 AI 研究人员使用，尤其是那些需要构建文档检索增强生成（RAG）系统、进行批量文本分析或希望让 AI 助手直接“阅读”本地文件的用户。虽然生成的内容也具备一定可读性，但其核心优势在于为机器",93400,"2026-04-06T19:52:38",[52,14],{"id":62,"github_repo":63,"name":64,"description_en":65,"description_zh":66,"ai_summary_zh":66,"readme_en":67,"readme_zh":68,"quickstart_zh":69,"use_case_zh":70,"hero_image_url":71,"owner_login":72,"owner_name":73,"owner_avatar_url":74,"owner_bio":75,"owner_company":76,"owner_location":77,"owner_email":78,"owner_twitter":72,"owner_website":79,"owner_url":80,"languages":78,"stars":81,"forks":82,"last_commit_at":83,"license":84,"difficulty_score":85,"env_os":86,"env_gpu":87,"env_ram":87,"env_deps":88,"category_tags":91,"github_topics":92,"view_count":32,"oss_zip_url":78,"oss_zip_packed_at":78,"status":17,"created_at":107,"updated_at":108,"faqs":109,"releases":110},9638,"qingsongedu\u002Ftime-series-transformers-review","time-series-transformers-review","A professionally curated list of awesome resources (paper, code, data, etc.) on transformers in time series.","time-series-transformers-review 是一个专注于时间序列领域 Transformer 技术的专业资源汇总库。它系统地收集并整理了相关的学术论文、开源代码、数据集等优质资料，旨在解决该领域技术迭代快、资源分散且难以全面追踪的痛点。作为首个对时间序列 Transformer 建模进展进行全方位梳理的项目，它不仅提供了一份详尽的分类清单，还涵盖了从基础理论到前沿应用（如时间序列预测）的最新成果，包括 ICLR、NeurIPS 等顶会的热门研究。\n\n该项目特别适合人工智能研究人员、数据科学家以及从事时序分析的开发者使用。对于希望快速了解行业动态、寻找实验基线或深入探究特定算法原理的用户而言，这里提供了极高价值的入门指引和参考坐标。其独特亮点在于拥有清晰的学术分类体系（Taxonomy），并配套了发表在 IJCAI 上的权威综述论文，帮助用户建立系统化的知识框架。社区保持活跃更新，欢迎用户共同贡献最新资源，是探索时间序列与 Transformer 结合趋势的理想起点。","# Transformers in Time Series  \n\n[![Awesome](https:\u002F\u002Fawesome.re\u002Fbadge.svg)](https:\u002F\u002Fawesome.re) \n![PRs Welcome](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FPRs-Welcome-green) \n![Stars](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fstars\u002Fqingsongedu\u002Ftime-series-transformers-review)\n[![Visits Badge](https:\u002F\u002Fbadges.pufler.dev\u002Fvisits\u002Fqingsongedu\u002Ftime-series-transformers-review)](https:\u002F\u002Fbadges.pufler.dev\u002Fvisits\u002Fqingsongedu\u002Ftime-series-transformers-review)\n\u003C!-- ![Forks](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fforks\u002Fqingsongedu\u002Ftime-series-transformers-review) -->\n\n\nA professionally curated list of awesome resources (paper, code, data, etc.) on **Transformers in Time Series**, which is first work to comprehensively and systematically summarize the recent advances of Transformers for modeling time series data to the best of our knowledge.\n\nWe will continue to update this list with newest resources. 
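The MarkItDown workflow described above has a very small Python surface. Below is a minimal sketch following the usage shown in the project's README (a `MarkItDown` class with a `convert()` method); the input file name is a placeholder, and the exact return fields should be checked against the current release.

```python
# Minimal MarkItDown usage sketch; the file name is hypothetical.
from markitdown import MarkItDown

md = MarkItDown()
result = md.convert("quarterly_report.pdf")  # also handles .docx, .xlsx, .html, ...
print(result.text_content)                   # Markdown text, ready for an LLM/RAG pipeline
```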
## qingsongedu/time-series-transformers-review

A professionally curated list of awesome resources (paper, code, data, etc.) on transformers in time series.

time-series-transformers-review is a specialist resource collection for Transformer techniques in the time-series domain. It systematically gathers and organizes the relevant academic papers, open-source code, and datasets, addressing a field where techniques iterate quickly and resources are scattered and hard to track comprehensively. As the first project to survey progress in Transformer-based time-series modeling across the board, it offers both a detailed categorized list and the latest results, from foundations to frontier applications such as time-series forecasting, including widely discussed work from top venues like ICLR and NeurIPS.

The project suits AI researchers, data scientists, and developers working on temporal analysis. For anyone who wants a quick read on the state of the art, experimental baselines, or a deep dive into a specific algorithm's principles, it provides a high-value entry point and frame of reference. Its distinguishing assets are a clear academic taxonomy and a companion survey published at IJCAI, which together build a systematic picture of the field; the community keeps the list actively updated and welcomes contributions, making it an ideal starting point for exploring the intersection of time series and Transformers.

# Transformers in Time Series

[![Awesome](https://awesome.re/badge.svg)](https://awesome.re)
![PRs Welcome](https://img.shields.io/badge/PRs-Welcome-green)
![Stars](https://img.shields.io/github/stars/qingsongedu/time-series-transformers-review)
[![Visits Badge](https://badges.pufler.dev/visits/qingsongedu/time-series-transformers-review)](https://badges.pufler.dev/visits/qingsongedu/time-series-transformers-review)

A professionally curated list of awesome resources (paper, code, data, etc.) on **Transformers in Time Series**, which is, to the best of our knowledge, the first work to comprehensively and systematically summarize the recent advances of Transformers for modeling time series data.

We will continue to update this list with the newest resources. If you find any missing resources (paper/code) or errors, please feel free to open an issue or make a pull request.

For general **AI for Time Series (AI4TS)** papers, tutorials, and surveys at the **top AI conferences and journals**, please check [this repo](https://github.com/qingsongedu/awesome-AI-for-time-series-papers).

For general **recent AI advances: tutorials and surveys in various areas (DL, ML, DM, CV, NLP, Speech, etc.)** at the **top AI conferences and journals**, please check [this repo](https://github.com/qingsongedu/awesome-AI-tutorials-surveys).

## Survey paper

[**Transformers in Time Series: A Survey**](https://arxiv.org/abs/2202.07125) (IJCAI'23 Survey Track)

[Qingsong Wen](https://sites.google.com/site/qingsongwen8/), Tian Zhou, Chaoli Zhang, Weiqi Chen, Ziqing Ma, [Junchi Yan](https://thinklab.sjtu.edu.cn/) and [Liang Sun](https://scholar.google.com/citations?user=8JbrsgUAAAAJ&hl=en).

#### If you find this repository helpful for your work, please kindly cite our survey paper.

```bibtex
@inproceedings{wen2023transformers,
  title={Transformers in time series: A survey},
  author={Wen, Qingsong and Zhou, Tian and Zhang, Chaoli and Chen, Weiqi and Ma, Ziqing and Yan, Junchi and Sun, Liang},
  booktitle={International Joint Conference on Artificial Intelligence (IJCAI)},
  year={2023}
}
```

## Taxonomy of Transformers for time series modeling

<img src="https://oss.gittoolsai.com/images/qingsongedu_time-series-transformers-review_readme_622fdc5ece29.jpg" width=700 align=middle> <br />

## Application Domains of Time Series Transformers

### Transformers in Forecasting
#### Time Series Forecasting
* CARD: Channel Aligned Robust Blend Transformer for Time Series Forecasting, in *ICLR* 2024. [\[paper\]](https://openreview.net/forum?id=MJksrOhurE) [\[official code\]](https://github.com/wxie9/card)
* Pathformer: Multi-scale Transformers with Adaptive Pathways for Time Series Forecasting, in *ICLR* 2024. [\[paper\]](https://openreview.net/forum?id=lJkOCMP2aW) [\[official code\]](https://github.com/decisionintelligence/pathformer)
* GAFormer: Enhancing Timeseries Transformers Through Group-Aware Embeddings, in *ICLR* 2024. [\[paper\]](https://openreview.net/forum?id=c56TWtYp0W)
* Transformer-Modulated Diffusion Models for Probabilistic Multivariate Time Series Forecasting, in *ICLR* 2024. [\[paper\]](https://openreview.net/forum?id=qae04YACHs)
* iTransformer: Inverted Transformers Are Effective for Time Series Forecasting, in *ICLR* 2024. [\[paper\]](https://openreview.net/forum?id=JePfAI8fah)
* Considering Nonstationary within Multivariate Time Series with Variational Hierarchical Transformer for Forecasting, in *AAAI* 2024.
* Latent Diffusion Transformer for Probabilistic Time Series Forecasting, in *AAAI* 2024.
* BasisFormer: Attention-based Time Series Forecasting with Learnable and Interpretable Basis, in *NeurIPS* 2023. [\[paper\]](https://neurips.cc/virtual/2023/poster/69976)
[\\[paper\\]](https:\u002F\u002Fneurips.cc\u002Fvirtual\u002F2023\u002Fposter\u002F69976)\n* ContiFormer: Continuous-Time Transformer for Irregular Time Series Modeling, in *NeurIPS* 2023.  [\\[paper\\]](https:\u002F\u002Fneurips.cc\u002Fvirtual\u002F2023\u002Fposter\u002F71304)\n* A Time Series is Worth 64 Words: Long-term Forecasting with Transformers, in *ICLR* 2023. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=Jbdc0vTOcol) [\\[code\\]](https:\u002F\u002Fgithub.com\u002Fyuqinie98\u002FPatchTST)\n* Crossformer: Transformer Utilizing Cross-Dimension Dependency for Multivariate Time Series Forecasting, in *ICLR* 2023. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=vSVLM2j9eie)\n* Scaleformer: Iterative Multi-scale Refining Transformers for Time Series Forecasting, in *ICLR* 2023. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=sCrnllCtjoE)\n* Non-stationary Transformers: Rethinking the Stationarity in Time Series Forecasting, in *NeurIPS* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2205.14415) \n* Learning to Rotate: Quaternion Transformer for Complicated Periodical Time Series Forecasting”, in *KDD* 2022. [\\[paper\\]](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002F10.1145\u002F3534678.3539234) \n* FEDformer: Frequency Enhanced Decomposed Transformer for Long-term Series Forecasting, in *ICML* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.12740) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002FMAZiqing\u002FFEDformer)\n* TACTiS: Transformer-Attentional Copulas for Time Series, in *ICML* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2202.03528) \n* Pyraformer: Low-Complexity Pyramidal Attention for Long-Range Time Series Modeling and Forecasting, in *ICLR* 2022. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=0EXmFzUn5I) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Falipay\u002FPyraformer) \n* Autoformer: Decomposition transformers with auto-correlation for long-term series forecasting, in *NeurIPS* 2021. [\\[paper\\]](https:\u002F\u002Fproceedings.neurips.cc\u002Fpaper\u002F2021\u002Fhash\u002Fbcc0d400288793e8bdcd7c19a8ac0c2b-Abstract.html) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fthuml\u002Fautoformer)\n* Informer: Beyond efficient transformer for long sequence time-series forecasting, in *AAAI* 2021. [\\[paper\\]](https:\u002F\u002Fwww.aaai.org\u002FAAAI21Papers\u002FAAAI-7346.ZhouHaoyi.pdf) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fzhouhaoyi\u002FInformer2020) [\\[dataset\\]](https:\u002F\u002Fgithub.com\u002Fzhouhaoyi\u002FETDataset) \n* Temporal fusion transformers for interpretable multi-horizon time series forecasting, in *International Journal of Forecasting* 2021. [\\[paper\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fpii\u002FS0169207021000637) [\\[code\\]](https:\u002F\u002Fgithub.com\u002Fmattsherar\u002FTemporal_Fusion_Transform)\n* Probabilistic Transformer For Time Series Analysis, in *NeurIPS* 2021. [\\[paper\\]](https:\u002F\u002Fproceedings.neurips.cc\u002Fpaper\u002F2021\u002Fhash\u002Fc68bd9055776bf38d8fc43c0ed283678-Abstract.html)  \n* Deep Transformer Models for Time Series Forecasting: The Influenza Prevalence Case, in *arXiv* 2020. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2001.08317)\n* Adversarial sparse transformer for time series forecasting, in *NeurIPS* 2020. 
[\\[paper\\]](https:\u002F\u002Fproceedings.neurips.cc\u002Fpaper\u002F2020\u002Fhash\u002Fc6b8c8d762da15fa8dbbdfb6baf9e260-Abstract.html) [\\[code\\]](https:\u002F\u002Fgithub.com\u002Fhihihihiwsf\u002FAST)\n* Enhancing the locality and breaking the memory bottleneck of transformer on time series forecasting, in *NeurIPS* 2019. [\\[paper\\]](https:\u002F\u002Fproceedings.neurips.cc\u002Fpaper\u002F2019\u002Fhash\u002F6775a0635c302542da2c32aa19d86be0-Abstract.html) [\\[code\\]](https:\u002F\u002Fgithub.com\u002Fmlpotter\u002FTransformer_Time_Series)\n* SSDNet: State Space Decomposition Neural Network for Time Series Forecasting, in *ICDM* 2021, [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2112.10251)\n* From Known to Unknown: Knowledge-guided Transformer for Time-Series Sales Forecasting in Alibaba, in *arXiv* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2109.08381)\n* TCCT: Tightly-coupled convolutional transformer on time series forecasting, in *Neurocomputing* 2022. [\\[paper\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fpii\u002FS0925231222000571)\n* Triformer: Triangular, Variable-Specific Attentions for Long Sequence Multivariate Time Series Forecasting, in *IJCAI* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2204.13767)\n\n\n #### Spatio-Temporal Forecasting\n* AirFormer: Predicting Nationwide Air Quality in China with Transformers, in *AAAI* 2023. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2211.15979) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fyoshall\u002FAirFormer) \n* Earthformer: Exploring Space-Time Transformers for Earth System Forecasting, in *NeurIPS* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2207.05833) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Famazon-science\u002Fearth-forecasting-transformer)\n* Bidirectional Spatial-Temporal Adaptive Transformer for Urban Traffic Flow Forecasting, in *TNNLS* 2022. [\\[paper\\]](https:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F9810964)\n* Spatio-temporal graph transformer networks for pedestrian trajectory prediction, in *ECCV* 2020. [\\[paper\\]](https:\u002F\u002Fwww.ecva.net\u002Fpapers\u002Feccv_2020\u002Fpapers_ECCV\u002Fhtml\u002F1636_ECCV_2020_paper.php) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002FMajiker\u002FSTAR)\n* Spatial-temporal transformer networks for traffic flow forecasting, in *arXiv* 2020. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2001.02908) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fxumingxingsjtu\u002FSTTN)\n* Traffic transformer: Capturing the continuity and periodicity of time series for traffic forecasting, in *Transactions in GIS* 2022. [\\[paper\\]](https:\u002F\u002Fcoolgiserz.github.io\u002Fpublication\u002Ftraffic-transformer-capturing-the-continuity-and-periodicity-of-time-series-for-traffic-forecasting\u002Ftraffic-transformer-capturing-the-continuity-and-periodicity-of-time-series-for-traffic-forecasting.pdf)\n\n #### Event Irregular Time Series Modeling\n* Time Series as Images: Vision Transformer for Irregularly Sampled Time Series，in *NeurIPS* 2023. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=ZmeAoWQqe0)\n* ContiFormer: Continuous-Time Transformer for Irregular Time Series Modeling，in *NeurIPS* 2023. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=YJDz4F2AZu)\n* HYPRO: A Hybridly Normalized Probabilistic Model for Long-Horizon Prediction of Event Sequences，in *NeurIPS* 2022. 
[\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2210.01753) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fant-research\u002Fhypro_tpp) \n* Transformer Embeddings of Irregularly Spaced Events and Their Participants, in *ICLR* 2022. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=Rty5g9imm7H) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fyangalan123\u002Fanhp-andtt)\n* Self-attentive Hawkes process, in *ICML* 2020. [\\[paper\\]](http:\u002F\u002Fproceedings.mlr.press\u002Fv119\u002Fzhang20q.html) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002FQiangAIResearcher\u002Fsahp_repo)\n* Transformer Hawkes process, in *ICML* 2020. [\\[paper\\]](https:\u002F\u002Fproceedings.mlr.press\u002Fv119\u002Fzuo20a.html) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002FSimiaoZuo\u002FTransformer-Hawkes-Process)\n\n\n\n### Transformers in Anomaly Detection\n* MEMTO: Memory-guided Transformer for Multivariate Time Series Anomaly Detection，in *NeurIPS* 2023. [\\[paper\\]](https:\u002F\u002Fopenreview.net\u002Fforum?id=UFW67uduJd)\n* CAT: Beyond Efficient Transformer for Content-Aware Anomaly Detection in Event Sequences, in *KDD* 2022. [\\[paper\\]](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002Fabs\u002F10.1145\u002F3534678.3539155) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fmmichaelzhang\u002FCAT)\n* DCT-GAN: Dilated Convolutional Transformer-based GAN for Time Series Anomaly Detection, in *TKDE* 2022. [\\[paper\\]](https:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F9626552)\n* Concept Drift Adaptation for Time Series Anomaly Detection via Transformer, in *Neural Processing Letters* 2022. [\\[paper\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs11063-022-11015-0)\n* Anomaly Transformer: Time Series Anomaly Detection with Association Discrepancy, in *ICLR* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2110.02642) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fthuml\u002FAnomaly-Transformer)\n* TranAD: Deep Transformer Networks for Anomaly Detection in Multivariate Time Series Data, in *VLDB* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.07284) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fimperial-qore\u002Ftranad)\n* Learning graph structures with transformer for multivariate time series anomaly detection in IoT, in *IEEE Internet of Things Journal* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2104.03466) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002FZEKAICHEN\u002FGTA)\n* Spacecraft Anomaly Detection via Transformer Reconstruction Error, in *ICASSE* 2019. [\\[paper\\]](http:\u002F\u002Fwww.utias.utoronto.ca\u002Fwp-content\u002Fuploads\u002F2019\u002F07\u002F88-Spacecraft-anomaly-detection-via-transformer-reconstruction-error.pdf)\n* Unsupervised Anomaly Detection in Multivariate Time Series through Transformer-based Variational Autoencoder, in *CCDC* 2021. [\\[paper\\]](https:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F9601669)\n* Variational Transformer-based anomaly detection approach for multivariate time series, in *Measurement* 2022. [\\[paper\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fabs\u002Fpii\u002FS0263224122000914)\n\n### Transformers in Classification\n* Time Series as Images: Vision Transformer for Irregularly Sampled Time Series, in *NeurIPS* 2023. 
[\\[paper\\]](https:\u002F\u002Fneurips.cc\u002Fvirtual\u002F2023\u002Fposter\u002F71219)\n* TrajFormer: Efficient Trajectory Classification with Transformers, in *CIKM* 2022. [\\[paper\\]](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002Fpdf\u002F10.1145\u002F3511808.3557481)\n* TARNet : Task-Aware Reconstruction for Time-Series Transformer, in *KDD* 2022. [\\[paper\\]](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002Fpdf\u002F10.1145\u002F3534678.3539329) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Franakroychowdhury\u002FTARNet)\n* A transformer-based framework for multivariate time series representation learning, in *KDD* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2010.02803) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fgzerveas\u002Fmvts_transformer)\n* Voice2series: Reprogramming acoustic models for time series classification, in *ICML* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2106.09296) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fhuckiyang\u002FVoice2Series-Reprogramming)\n* Gated Transformer Networks for Multivariate Time Series Classification, in *arXiv* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2103.14438) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002FZZUFaceBookDL\u002FGTN)\n* Self-attention for raw optical satellite time series classification, in *ISPRS Journal of Photogrammetry and Remote Sensing* 2020. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F1910.10536) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fmarccoru\u002Fcrop-type-mapping)\n* Self-supervised pretraining of transformers for satellite image time series classification, in *IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing* 2020. [\\[paper\\]](https:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F9252123)\n* Self-Supervised Transformer for Sparse and Irregularly Sampled Multivariate Clinical Time-Series, in *ACM TKDD* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2107.14293) [\\[official code\\]](https:\u002F\u002Fgithub.com\u002Fsindhura97\u002FSTraTS)\n\n\n\n## Time Series Related Survey\n* What Can Large Language Models Tell Us about Time Series Analysis, in *arXiv* 2024. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2402.02713)\n* Large Models for Time Series and Spatio-Temporal Data: A Survey and Outlook, in *arXiv* 2023. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2310.10196) [\\[Website\\]](https:\u002F\u002Fgithub.com\u002Fqingsongedu\u002FAwesome-TimeSeries-SpatioTemporal-LM-LLM)\n* Deep Learning for Multivariate Time Series Imputation: A Survey, in *arXiv* 2024. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2402.04059) [\\[Website\\]](https:\u002F\u002Fgithub.com\u002Fwenjiedu\u002Fawesome_imputation)\n* Self-Supervised Learning for Time Series Analysis: Taxonomy, Progress, and Prospects, in *arXiv* 2023. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2306.10125) [\\[Website\\]](https:\u002F\u002Fgithub.com\u002Fqingsongedu\u002FAwesome-SSL4TS)\n* A Survey on Graph Neural Networks for Time Series: Forecasting, Classification, Imputation, and Anomaly Detection, in *arXiv* 2023. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2307.03759) [\\[Website\\]](https:\u002F\u002Fgithub.com\u002FKimMeen\u002FAwesome-GNN4TS)\n* Time series data augmentation for deep learning: a survey, in *IJCAI* 2021. 
[\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2002.12478)\n* Neural temporal point processes: a review, in *IJCAI* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2104.03528v5)\n* Time-series forecasting with deep learning: a survey, in *Philosophical Transactions of the Royal Society A* 2021. [\\[paper\\]](https:\u002F\u002Froyalsocietypublishing.org\u002Fdoi\u002Ffull\u002F10.1098\u002Frsta.2020.0209)\n* Deep learning for time series forecasting: a survey, in *Big Data* 2021. [\\[paper\\]](https:\u002F\u002Fwww.liebertpub.com\u002Fdoi\u002Fabs\u002F10.1089\u002Fbig.2020.0159)\n* Neural forecasting: Introduction and literature overview, in *arXiv* 2020. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2004.10240) \n* Deep learning for anomaly detection in time-series data: review, analysis, and guidelines, in *Access* 2021. [\\[paper\\]](https:\u002F\u002Fieeexplore.ieee.org\u002Fabstract\u002Fdocument\u002F9523565) \n* A review on outlier\u002Fanomaly detection in time series data, in *ACM Computing Surveys* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2002.04236)\n* A unifying review of deep and shallow anomaly detection, in *Proceedings of the IEEE* 2021. [\\[paper\\]](http:\u002F\u002F128.84.4.34\u002Fabs\u002F2009.11732)\n* Deep learning for time series classification: a review, in *Data Mining and Knowledge Discovery* 2019. [\\[paper\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs10618-019-00619-1?sap-outbound-id=11FC28E054C1A9EB6F54F987D4B526A6EE3495FD&mkt-key=005056A5C6311EE999A3A1E864CDA986)\n* More related time series surveys, tutorials, and papers can be found at this [repo](https:\u002F\u002Fgithub.com\u002Fqingsongedu\u002Fawesome-AI-for-time-series-papers).\n\n \n## Transformer\u002FAttention Tutorial\u002FSurvey in Other Disciplines\n* Everything You Need to Know about Transformers: Architectures, Optimization, Applications, and Interpretation, in *AAAI Tutorial* 2023. [\\[link\\]](https:\u002F\u002Ftransformer-tutorial.github.io\u002Faaai2023\u002F)  \n* Transformer Architectures for Multimodal Signal Processing and Decision Making, in *ICASSP Tutorial* 2022. [\\[link\\]](https:\u002F\u002Ftransformer-tutorial.github.io\u002Ficassp2022\u002F)  \n* Efficient transformers: A survey, in *ACM Computing Surveys* 2022. [\\[paper\\]](https:\u002F\u002Fdl.acm.org\u002Fdoi\u002F10.1145\u002F3530811) [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2009.06732)\n* A survey on visual transformer, in *IEEE TPAMI* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2012.12556)\n* A General Survey on Attention Mechanisms in Deep Learning, in *IEEE TKDE* 2022. [\\[paper\\]](https:\u002F\u002Fpersonal.eur.nl\u002Ffrasincar\u002Fpapers\u002FTKDE2022\u002Ftkde2022.pdf)\n* Attention, please! A survey of neural attention models in deep learning, in *Artificial Intelligence Review* 2022. [\\[paper\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs10462-022-10148-x)\n* Attention mechanisms in computer vision: A survey, in *Computational Visual Media* 2022. [\\[paper\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs41095-022-0271-y)\n* Survey: Transformer based video-language pre-training, in _AI Open_ 2022. [\\[paper\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fpii\u002FS2666651022000018)\n* Transformers in vision: A survey, in *ACM Computing Surveys* 2021. 
[\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2101.01169)\n* Pre-trained models: Past, present and future, in *AI Open* 2021. [\\[paper\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fpii\u002FS2666651021000231)\n* An attentive survey of attention models, in *ACM TIST* 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F1904.02874)\n* Attention in natural language processing, in *IEEE TNNLS* 2020. [\\[paper\\]](https:\u002F\u002Fieeexplore.ieee.org\u002Fstamp\u002Fstamp.jsp?tp=&arnumber=9194070)\n* Pre-trained models for natural language processing: A survey, in *Science China Technological Sciences* 2020. [\\[paper\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs11431-020-1647-3)\n* A review on the attention mechanism of deep learning, in *Neurocomputing* 2021. [\\[paper\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fabs\u002Fpii\u002FS092523122100477X)\n* A Survey of Transformers, in _arXiv_ 2021. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2106.04554)\n* A Survey of Vision-Language Pre-Trained Models, in _arXiv_ 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2202.10936)\n* Video Transformers: A Survey, in *arXiv* 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.05991)\n* Transformer for Graphs: An Overview from Architecture Perspective, in _arXiv_ 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2202.08455)\n* Transformers in Medical Imaging: A Survey, in _arXiv_ 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.09873) \n* A Survey of Controllable Text Generation using Transformer-based Pre-trained Language Models, in _arXiv_ 2022. [\\[paper\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.05337) \n\n","# 时间序列中的 Transformer  \n\n[![Awesome](https:\u002F\u002Fawesome.re\u002Fbadge.svg)](https:\u002F\u002Fawesome.re) \n![欢迎 PR](https:\u002F\u002Fimg.shields.io\u002Fbadge\u002FPRs-Welcome-green) \n![星标数](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fstars\u002Fqingsongedu\u002Ftime-series-transformers-review)\n[![访问量徽章](https:\u002F\u002Fbadges.pufler.dev\u002Fvisits\u002Fqingsongedu\u002Ftime-series-transformers-review)](https:\u002F\u002Fbadges.pufler.dev\u002Fvisits\u002Fqingsongedu\u002Ftime-series-transformers-review)\n\u003C!-- ![复刻数](https:\u002F\u002Fimg.shields.io\u002Fgithub\u002Fforks\u002Fqingsongedu\u002Ftime-series-transformers-review) -->\n\n\n这是一份由专业人士精心整理的关于**时间序列中的 Transformer**的优质资源列表（包括论文、代码、数据等），也是目前我们所知的首份全面且系统性地总结 Transformer 在时间序列建模领域最新进展的工作。\n\n我们将持续更新此列表，以纳入最新的资源。如果您发现有任何遗漏的资源（论文或代码）或错误，请随时提交 issue 或 pull request。\n\n如需了解在**顶级人工智能会议和期刊**上发表的关于**时间序列人工智能（AI4TS）**的通用论文、教程和综述，请查看[此仓库](https:\u002F\u002Fgithub.com\u002Fqingsongedu\u002Fawesome-AI-for-time-series-papers)。\n\n如需了解在**顶级人工智能会议和期刊**上发表的关于**各类人工智能领域的最新进展：教程与综述**（深度学习、机器学习、数据挖掘、计算机视觉、自然语言处理、语音处理等），请查看[此仓库](https:\u002F\u002Fgithub.com\u002Fqingsongedu\u002Fawesome-AI-tutorials-surveys)。\n\n\n\n\n## 综述论文\n\n[**时间序列中的 Transformer：综述**](https:\u002F\u002Farxiv.org\u002Fabs\u002F2202.07125)（IJCAI 2023 综述赛道）\n\n[温庆松](https:\u002F\u002Fsites.google.com\u002Fsite\u002Fqingsongwen8\u002F)、周天、张超立、陈伟奇、马子清、[严骏驰](https:\u002F\u002Fthinklab.sjtu.edu.cn\u002F) 和 [孙亮](https:\u002F\u002Fscholar.google.com\u002Fcitations?user=8JbrsgUAAAAJ&hl=en)。\n\n#### 如果您认为本仓库对您的工作有所帮助，请引用我们的综述论文。\n\n```bibtex\n@inproceedings{wen2023transformers,\n  title={Transformers in time series: A survey},\n  author={Wen, Qingsong and Zhou, Tian and Zhang, Chaoli and Chen, 
2022年。[\\[论文\\]](https:\u002F\u002Fpersonal.eur.nl\u002Ffrasincar\u002Fpapers\u002FTKDE2022\u002Ftkde2022.pdf)\n* 注意力，请注意！深度学习中神经注意力模型的综述，发表于 *Artificial Intelligence Review* 2022年。[\\[论文\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs10462-022-10148-x)\n* 计算机视觉中的注意力机制综述，发表于 *Computational Visual Media* 2022年。[\\[论文\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs41095-022-0271-y)\n* 基于 Transformer 的视频-语言预训练综述，发表于 _AI Open_ 2022年。[\\[论文\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fpii\u002FS2666651022000018)\n* 视觉领域的 Transformer 综述，发表于 *ACM Computing Surveys* 2021年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2101.01169)\n* 预训练模型：过去、现在与未来，发表于 *AI Open* 2021年。[\\[论文\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fpii\u002FS2666651021000231)\n* 注意力模型的细致综述，发表于 *ACM TIST* 2021年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F1904.02874)\n* 自然语言处理中的注意力机制，发表于 *IEEE TNNLS* 2020年。[\\[论文\\]](https:\u002F\u002Fieeexplore.ieee.org\u002Fstamp\u002Fstamp.jsp?tp=&arnumber=9194070)\n* 自然语言处理中的预训练模型综述，发表于 *Science China Technological Sciences* 2020年。[\\[论文\\]](https:\u002F\u002Flink.springer.com\u002Farticle\u002F10.1007\u002Fs11431-020-1647-3)\n* 深度学习中注意力机制的回顾，发表于 *Neurocomputing* 2021年。[\\[论文\\]](https:\u002F\u002Fwww.sciencedirect.com\u002Fscience\u002Farticle\u002Fabs\u002Fpii\u002FS092523122100477X)\n* Transformer 综述，发表于 _arXiv_ 2021年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2106.04554)\n* 视觉-语言预训练模型综述，发表于 _arXiv_ 2022年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2202.10936)\n* 视频 Transformer 综述，发表于 *arXiv* 2022年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.05991)\n* 面向图结构的 Transformer：从架构角度的概述，发表于 _arXiv_ 2022年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2202.08455)\n* 医学影像中的 Transformer 综述，发表于 _arXiv_ 2022年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.09873) \n* 基于 Transformer 预训练语言模型的可控文本生成综述，发表于 _arXiv_ 2022年。[\\[论文\\]](https:\u002F\u002Farxiv.org\u002Fabs\u002F2201.05337)","# time-series-transformers-review 快速上手指南\n\n`time-series-transformers-review` 并非一个可直接安装的 Python 软件包，而是一个**精选资源列表仓库**。它系统性地整理了基于 Transformer 的时间序列分析（预测、异常检测等）的论文、代码和数据集。\n\n本指南将帮助你如何利用该仓库查找资源，并快速运行其中推荐的经典模型（以 `Autoformer` 和 `Informer` 为例）。\n\n## 环境准备\n\n在开始之前，请确保你的开发环境满足以下要求：\n\n*   **操作系统**: Linux (推荐), macOS, 或 Windows (WSL2)\n*   **Python 版本**: 3.8 - 3.10 (大多数时间序列深度学习项目在此范围兼容性最好)\n*   **硬件**: 建议配备 NVIDIA GPU 以加速模型训练和推理\n*   **前置依赖**:\n    *   Git\n    *   PyTorch (需根据 CUDA 版本安装)\n    *   Pandas, Numpy, Scikit-learn\n\n## 安装步骤\n\n由于这是一个资源索引库，你不需要通过 `pip` 安装它，而是需要克隆仓库以获取论文列表和对应模型的官方代码链接。随后，你需要克隆具体感兴趣的模型代码库。\n\n### 1. 克隆资源索引库\n首先获取最新的论文和代码清单：\n\n```bash\ngit clone https:\u002F\u002Fgithub.com\u002Fqingsongedu\u002Ftime-series-transformers-review.git\ncd time-series-transformers-review\n```\n\n### 2. 选择并克隆具体模型代码\n浏览仓库中的 `README.md` 或查看整理的论文列表，找到你需要的模型（例如 `Autoformer`）。点击其 `[official code]` 链接跳转到对应仓库进行克隆。\n\n**示例：克隆 Autoformer (NeurIPS 2021)**\n\n```bash\n# 进入你的项目目录\ncd ..\n# 克隆 Autoformer 官方代码库\ngit clone https:\u002F\u002Fgithub.com\u002Fthuml\u002Fautoformer.git\ncd autoformer\n```\n\n### 3. 
### 3. Install the model's dependencies

Inside the model directory, install its Python dependencies. (The original guide recommends a Chinese PyPI mirror, such as Tsinghua's, for users in mainland China.)

```bash
# install dependencies with pip (mirror optional)
pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple

# if requirements.txt does not include torch, install it manually
# (adjust for your CUDA version)
# example: CPU-only PyTorch
pip install torch torchvision torchaudio -i https://pypi.tuna.tsinghua.edu.cn/simple
# example: PyTorch for CUDA 11.8
# pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118
```

## Basic usage

Most models referenced by the list follow a similar workflow: **prepare data → configure parameters → run the script**. The steps below use `autoformer` as the simplest example.

### 1. Prepare the data

Most repositories ship data-processing scripts or sample data. Datasets (such as ETTh1 or Weather) usually go in a `./dataset` directory.

```bash
# create the data directory if it doesn't exist
mkdir -p dataset
# place the downloaded .csv files in the dataset folder
# see each model repository's README for the exact data format
```

### 2. Run a forecasting task

Launch training or prediction with the provided run script. You typically specify the model name, dataset, input length, and prediction length. The flags below are illustrative; check the target repository's README for its actual argument names.

**Example command:**

```bash
# run Autoformer long-sequence forecasting on the ETTh1 dataset
# --model_name: model name
# --data:       dataset name
# --seq_len:    input sequence length
# --pred_len:   prediction sequence length
python run.py \
  --model_name Autoformer \
  --data ETTh1 \
  --seq_len 96 \
  --pred_len 96 \
  --train_epochs 10
```

### 3. Check the results

When the run finishes, the program typically writes log files and prediction metrics (such as MSE and MAE) to `./results` or the current directory.

---

**Tips**:
*   The list covers **Forecasting**, **Anomaly Detection**, **Spatio-Temporal Forecasting**, and other areas.
*   For other models (such as `Informer`, `FEDformer`, or `PatchTST`), follow the "Quick Start" section of the respective official repository; the command format is usually similar.
*   If you build on these resources, please cite the survey paper in your research: *Transformers in Time Series: A Survey (IJCAI'23)*.

## Use case: short-term wind-power forecasting

An algorithm team at a renewable-energy company is optimizing a short-term wind-power forecasting model to handle grid-dispatch volatility caused by sudden weather changes.

### Without time-series-transformers-review
- **Literature search is a needle-in-a-haystack exercise**: team members search arXiv, Google Scholar, and other platforms separately, with no systematic view of the latest Transformer work in the field (such as iTransformer or Pathformer from ICLR 2024).
- **Reproduction is costly and risky**: papers often lack official code links, or the code quality varies wildly, so engineers spend weeks cleaning data and rebuilding networks without reproducing the reported results.
- **Technology choices lack grounding**: facing debates such as whether attention suits non-stationary series, the team has no authoritative survey or taxonomy to lean on, making it easy to pick the wrong baseline and waste effort.
- **Lagging behind the frontier**: without a continuously updated list, the team may still be running a two-year-old architecture, missing newer techniques, such as diffusion models combined with Transformers, that markedly improve probabilistic forecast accuracy.

### With time-series-transformers-review
- **One-stop access to core resources**: the list points straight to top-venue papers on non-stationary or multivariate forecasting (such as NeurIPS's ContiFormer), with one-click links to official code, raising retrieval efficiency by 80%.
- **Fast validation and deployment**: using the vetted, high-quality code repositories in the list, the team stands up baselines and comparison experiments within two days, quickly validating new architectures on wind-power data.
- **Evidence-based architecture decisions**: with the list's systematic taxonomy and companion survey, the team understands where each variant applies and confidently adopts the inverted-Transformer architecture better suited to long-sequence forecasting.
- **Staying current with global results**: thanks to the project's continuous updates, the team adopts the latest group-aware-embedding technique early, noticeably improving robustness under extreme weather.

time-series-transformers-review compresses months of survey work and trial-and-error into days, serving as an accelerator for time-series engineers tackling hard forecasting problems.

## Project metadata

- Maintainer: [Qingsong Wen](https://qingsongedu.github.io) (GitHub: qingsongedu), Head of AI @ Squirrel Ai Learning, Seattle, WA
- 2,978 stars, 269 forks; MIT license; last commit 2026-04-19
- GitHub topics: timeseries, transformer, forecasting, anomalydetection, classification, timeseries-analysis, time-series, time-series-forecasting, machine-learning, deep-learning, awesome, survey, transformers, review
- Environment note: the repository is a survey-style resource list (papers and code links), not runnable software, so its README specifies no OS, hardware, Python, or dependency requirements; consult the individual model repositories it references (such as Autoformer, Informer, or PatchTST) for their specific environment needs.