{"payload":{"pageCount":3,"repositories":[{"type":"Public","name":"depyf","owner":"thuml","isFork":false,"description":"depyf is a tool to help you understand and adapt to PyTorch compiler torch.compile.","allTopics":["deep-learning","compiler","pytorch"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":421,"forksCount":10,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-16T23:50:25.646Z"}},{"type":"Public","name":"Time-Series-Library","owner":"thuml","isFork":false,"description":"A Library for Advanced Deep Time Series Models.","allTopics":["deep-learning","time-series"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":5,"issueCount":11,"starsCount":6349,"forksCount":1013,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-11T13:19:58.728Z"}},{"type":"Public","name":"Transolver","owner":"thuml","isFork":false,"description":"About code release of \"Transolver: A Fast Transformer Solver for PDEs on General Geometries\", ICML 2024 Spotlight. https://arxiv.org/abs/2402.02366","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":61,"forksCount":5,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,6,3,2,2,0,0,0,0,0,0,0,0,0,0,1,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-10T14:31:53.976Z"}},{"type":"Public","name":"iTransformer","owner":"thuml","isFork":false,"description":"Official implementation for \"iTransformer: Inverted Transformers Are Effective for Time Series Forecasting\" (ICLR 2024 Spotlight), https://openreview.net/forum?id=JePfAI8fah","allTopics":["transformer","time-series-forecasting"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":15,"starsCount":1146,"forksCount":200,"license":"MIT License","participation":[0,0,0,0,8,28,4,4,2,2,4,2,2,4,2,0,0,0,1,2,0,0,1,0,0,0,0,0,2,0,0,2,0,0,0,0,0,3,5,2,0,0,0,0,0,0,0,2,0,0,1,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-09T09:05:34.935Z"}},{"type":"Public","name":"Large-Time-Series-Model","owner":"thuml","isFork":false,"description":"Official code, datasets and checkpoints for \"Timer: Generative Pre-trained Transformers Are Large Time Series Models\" (ICML 2024) ","allTopics":["time-series","transformer","large-models","time-series-analysis"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":11,"starsCount":240,"forksCount":19,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-03T07:35:40.694Z"}},{"type":"Public","name":"AutoTimes","owner":"thuml","isFork":false,"description":"Official implementation for \"AutoTimes: Autoregressive Time Series Forecasters via Large Language Models\"","allTopics":["time-series-forecasting","large-language-models"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":52,"forksCount":4,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-02T09:54:50.914Z"}},{"type":"Public","name":"BTTackler","owner":"thuml","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":0,"license":"MIT 
License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-01T14:47:24.455Z"}},{"type":"Public","name":"iVideoGPT","owner":"thuml","isFork":false,"description":"Official repo for \"iVideoGPT: Interactive VideoGPTs are Scalable World Models\", https://arxiv.org/abs/2405.15223","allTopics":["model-based-reinforcement-learning","video-prediction","visual-planning","world-model"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":59,"forksCount":3,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,6,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-09-01T10:30:48.841Z"}},{"type":"Public","name":"timer","owner":"thuml","isFork":false,"description":"See the official code and checkpoints for \"Timer: Generative Pre-trained Transformers Are Large Time Series Models\"","allTopics":[],"primaryLanguage":{"name":"HTML","color":"#e34c26"},"pullRequestCount":0,"issueCount":0,"starsCount":6,"forksCount":1,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-08-19T08:44:44.544Z"}},{"type":"Public","name":"Nonstationary_Transformers","owner":"thuml","isFork":false,"description":"Code release for \"Non-stationary Transformers: Exploring the Stationarity in Time Series Forecasting\" (NeurIPS 2022), https://arxiv.org/abs/2205.14415","allTopics":["deep-learning","time-series","forecasting","non-stationary"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":2,"starsCount":463,"forksCount":73,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-08-19T03:35:19.913Z"}},{"type":"Public","name":"Multi-Embedding","owner":"thuml","isFork":false,"description":"About Code Release for \"On the Embedding Collapse When Scaling Up Recommendation Models\" (ICML 2024)","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":11,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-08-04T00:56:50.752Z"}},{"type":"Public","name":"awesome-multi-task-learning","owner":"thuml","isFork":false,"description":"2024 up-to-date list of DATASETS, CODEBASES and PAPERS on Multi-Task Learning (MTL), from Machine Learning perspective.","allTopics":["adapter","machine-learning","deep-neural-networks","computer-vision","deep-learning","awesome-list","transfer-learning","multi-task-learning","neural-language-processing","multi-domain-learning","loss-strategy","multi-task-optimization","multi-task-architecture"],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":635,"forksCount":47,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-28T07:49:05.474Z"}},{"type":"Public","name":"MobileAttention","owner":"thuml","isFork":false,"description":"Official implementation of \"Mobile Attention: Mobile-Friendly Linear-Attention for Vision Transformers in PyTorch\". 
To run the code, you can refer to https://github.com/thuml/Flowformer.","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":2,"forksCount":0,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-19T01:06:19.725Z"}},{"type":"Public","name":"HelmFluid","owner":"thuml","isFork":false,"description":"About code release of \"HelmFluid: Learning Helmholtz Dynamics for Interpretable Fluid Prediction\", ICML 2024. https://arxiv.org/pdf/2310.10565","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":11,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-18T17:23:33.134Z"}},{"type":"Public","name":"Flowformer","owner":"thuml","isFork":false,"description":"About Code release for \"Flowformer: Linearizing Transformers with Conservation Flows\" (ICML 2022), https://arxiv.org/pdf/2202.06258.pdf","allTopics":["deep-learning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":301,"forksCount":28,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-18T02:34:14.147Z"}},{"type":"Public","name":"Koopa","owner":"thuml","isFork":false,"description":"Code release for \"Koopa: Learning Non-stationary Time Series Dynamics with Koopman Predictors\" (NeurIPS 2023), https://arxiv.org/abs/2305.18803","allTopics":["deep-learning","time-series","forecasting","non-stationary"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":1,"starsCount":175,"forksCount":23,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-07-01T13:35:26.534Z"}},{"type":"Public","name":"HarmonyDream","owner":"thuml","isFork":false,"description":"Code release for \"HarmonyDream: Task Harmonization Inside World Models\" (ICML 2024), https://arxiv.org/abs/2310.00344","allTopics":["model-based-reinforcement-learning","world-models"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":21,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-27T05:30:00.451Z"}},{"type":"Public","name":"TimeSiam","owner":"thuml","isFork":false,"description":"","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":27,"forksCount":2,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-06-11T10:10:07.307Z"}},{"type":"Public","name":"Transfer-Learning-Library","owner":"thuml","isFork":false,"description":"Transfer Learning Library for Domain Adaptation, Task Adaptation, and Domain Generalization","allTopics":["semi-supervised-learning","self-training","transfer-learning","image-translation","adversarial-learning","finetune","unsupervised-domain-adaptation","dann","out-of-distribution-generalization","deep-learning","domain-adaptation"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":4,"issueCount":7,"starsCount":3335,"forksCount":551,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-10T06:59:42.213Z"}},{"type":"Public","name":"SimMTM","owner":"thuml","isFork":false,"description":"About Code release for \"SimMTM: A Simple Pre-Training Framework for Masked 
Time-Series Modeling\" (NeurIPS 2023 Spotlight), https://arxiv.org/abs/2302.00861","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":7,"starsCount":107,"forksCount":13,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-06T11:40:14.396Z"}},{"type":"Public","name":"TimesNet","owner":"thuml","isFork":false,"description":"About Code release for \"TimesNet: Temporal 2D-Variation Modeling for General Time Series Analysis\" (ICLR 2023), https://openreview.net/pdf?id=ju_Uqw384Oq","allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":682,"forksCount":61,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-02T17:50:56.374Z"}},{"type":"Public","name":"Latent-Spectral-Models","owner":"thuml","isFork":false,"description":"About Code Release for \"Solving High-Dimensional PDEs with Latent Spectral Models\" (ICML 2023), https://arxiv.org/abs/2301.12664","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":2,"starsCount":59,"forksCount":5,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-28T14:11:43.816Z"}},{"type":"Public","name":"ContextWM","owner":"thuml","isFork":false,"description":"Code release for \"Pre-training Contextualized World Models with In-the-wild Videos for Reinforcement Learning\" (NeurIPS 2023), https://arxiv.org/abs/2305.18499","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":53,"forksCount":2,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-22T13:01:59.386Z"}},{"type":"Public","name":"Autoformer","owner":"thuml","isFork":false,"description":"About Code release for \"Autoformer: Decomposition Transformers with Auto-Correlation for Long-Term Series Forecasting\" (NeurIPS 2021), https://arxiv.org/abs/2106.13008","allTopics":["deep-learning","time-series"],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":5,"starsCount":1896,"forksCount":410,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-15T08:58:09.706Z"}},{"type":"Public","name":"ForkMerge","owner":"thuml","isFork":false,"description":"Code release of paper \"ForkMerge: Mitigating Negative Transfer in Auxiliary-Task Learning\" (NeurIPS 2023)","allTopics":["multi-task-learning","auxiliary-task-learning"],"primaryLanguage":null,"pullRequestCount":0,"issueCount":1,"starsCount":14,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-12-30T07:05:40.858Z"}},{"type":"Public","name":"Anomaly-Transformer","owner":"thuml","isFork":false,"description":"About Code release for \"Anomaly Transformer: Time Series Anomaly Detection with Association Discrepancy\" (ICLR 2022 Spotlight), https://openreview.net/forum?id=LzQQ89U1qm_","allTopics":["deep-learning","time-series","anomaly-detection"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":28,"starsCount":717,"forksCount":186,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-12-29T11:36:58.993Z"}},{"type":"Public","name":"learn_torch.compile","owner":"thuml","isFork":false,"description":"torch.compile artifacts for common deep learning models, can 
be used as a learning resource for torch.compile ","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":13,"forksCount":0,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-12-22T07:19:38.668Z"}},{"type":"Public","name":"Corrformer","owner":"thuml","isFork":false,"description":"About code release of \"Interpretable Weather Forecasting for Worldwide Stations with a Unified Deep Model\", Nature Machine Intelligence, 2023. https://www.nature.com/articles/s42256-023-00667-9","allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":157,"forksCount":24,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-11-08T08:32:28.271Z"}},{"type":"Public","name":"predrnn-pytorch","owner":"thuml","isFork":false,"description":"Official implementation for NIPS'17 paper: PredRNN: Recurrent Neural Networks for Predictive Learning Using Spatiotemporal LSTMs.","allTopics":["recurrent-neural-networks","predictive-learning","video-prediction"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":6,"starsCount":439,"forksCount":111,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-11-04T01:54:31.943Z"}},{"type":"Public","name":"LogME","owner":"thuml","isFork":false,"description":"Code release for \"LogME: Practical Assessment of Pre-trained Models for Transfer Learning\" (ICML 2021) and Ranking and Tuning Pre-trained Models: A New Paradigm for Exploiting Model Hubs (JMLR 2022)","allTopics":["deep-learning"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":7,"starsCount":201,"forksCount":18,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-10-06T00:57:50.236Z"}}],"repositoryCount":82,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"thuml repositories"}
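The depyf entry above describes a debugging aid for torch.compile. Below is a minimal usage sketch: the `torch.compile` decorator is standard PyTorch 2.x, while `depyf.prepare_debug` and the dump-directory name follow the pattern in depyf's README and should be double-checked against the repository before use.

```python
import torch
import depyf  # assumed installed, e.g. `pip install depyf`

@torch.compile  # standard PyTorch 2.x: compile with TorchDynamo/Inductor
def toy_fn(x: torch.Tensor) -> torch.Tensor:
    return torch.sin(x) + torch.cos(x)

# depyf dumps human-readable source for what torch.compile produced
# (captured graphs, guards, decompiled bytecode) into the given directory.
# The directory name "./depyf_dump" is arbitrary.
with depyf.prepare_debug("./depyf_dump"):
    toy_fn(torch.randn(8))
```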
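iTransformer's key idea, per the paper title, is to invert the usual Transformer tokenization for multivariate forecasting: each variate's whole lookback series becomes one token (embedded by a linear layer over time), and attention runs across variates rather than across time steps. The following is an illustrative sketch of that inverted embedding, not the repository's actual code; the module and parameter names are made up for the example.

```python
import torch
import torch.nn as nn

class InvertedEncoderSketch(nn.Module):
    """Illustrative sketch of iTransformer-style inverted tokens.

    Input:  (batch, seq_len, n_variates) multivariate lookback window.
    Tokens: one per variate, embedded from its full length-seq_len series.
    """

    def __init__(self, seq_len: int, pred_len: int, d_model: int = 64, n_heads: int = 4):
        super().__init__()
        self.embed = nn.Linear(seq_len, d_model)         # whole series -> one token
        layer = nn.TransformerEncoderLayer(d_model, n_heads, batch_first=True)
        self.encoder = nn.TransformerEncoder(layer, num_layers=2)
        self.project = nn.Linear(d_model, pred_len)      # token -> forecast horizon

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        tokens = self.embed(x.transpose(1, 2))           # (batch, n_variates, d_model)
        tokens = self.encoder(tokens)                    # attention across variates
        return self.project(tokens).transpose(1, 2)      # (batch, pred_len, n_variates)

# Usage: forecast 24 future steps for 7 variates from a 96-step window.
model = InvertedEncoderSketch(seq_len=96, pred_len=24)
out = model(torch.randn(2, 96, 7))
print(out.shape)  # torch.Size([2, 24, 7])
```

In this arrangement attention mixes information across variates, while each variate's temporal pattern is handled by the linear embedding and projection.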
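LogME scores a pre-trained model by the log marginal evidence of a Bayesian linear model fitted from the model's features to the target labels, with the prior and noise precisions (alpha, beta) found by fixed-point iteration. The sketch below implements that evidence computation for a single regression target as the paper describes it; it is not the repository's code (prefer the repo's own implementation), and the helper name and the plain, unaccelerated fixed-point loop are this example's assumptions.

```python
import numpy as np

def logme_score(features: np.ndarray, y: np.ndarray, iters: int = 20) -> float:
    """Per-sample log evidence of y ~ N(F w, 1/beta), w ~ N(0, 1/alpha).

    Hypothetical helper following the LogME paper's fixed-point updates.
    """
    n, d = features.shape
    # Eigendecompose F^T F once; sigma holds the squared singular values of F.
    sigma, v = np.linalg.eigh(features.T @ features)
    sigma = np.clip(sigma, 0.0, None)
    fy = features.T @ y                      # F^T y, reused every iteration
    alpha, beta = 1.0, 1.0
    for _ in range(iters):
        # Posterior mean m = beta * A^{-1} F^T y, where A = alpha I + beta F^T F.
        a_inv_diag = 1.0 / (alpha + beta * sigma)
        m = v @ (a_inv_diag * (v.T @ (beta * fy)))
        gamma = np.sum(beta * sigma / (alpha + beta * sigma))  # effective dimensions
        res2 = np.sum((y - features @ m) ** 2)                 # squared residual
        alpha = gamma / max(m @ m, 1e-12)
        beta = (n - gamma) / max(res2, 1e-12)
    # Plug the converged alpha, beta, m back into the evidence.
    a_inv_diag = 1.0 / (alpha + beta * sigma)
    m = v @ (a_inv_diag * (v.T @ (beta * fy)))
    res2 = np.sum((y - features @ m) ** 2)
    logdet_a = np.sum(np.log(alpha + beta * sigma))
    evidence = (n / 2) * np.log(beta) + (d / 2) * np.log(alpha) \
        - (n / 2) * np.log(2 * np.pi) - (beta / 2) * res2 \
        - (alpha / 2) * (m @ m) - 0.5 * logdet_a
    return evidence / n
```

For a k-class classification task the paper averages this score over the k one-hot label columns; a higher LogME value indicates a more transferable feature extractor.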