diff --git a/locales/ar/modelProvider.json b/locales/ar/modelProvider.json
index 4c13bc8eecd39..0d45c639647b9 100644
--- a/locales/ar/modelProvider.json
+++ b/locales/ar/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "رمز HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "أدخل رمز الوصول الخاص بك لـ ModelScope، انقر [هنا](https://www.modelscope.cn/my/myaccesstoken) للحصول عليه",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "رمز الوصول لـ ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "اختبر ما إذا تم إدخال عنوان الوكيل بشكل صحيح",
diff --git a/locales/bg-BG/modelProvider.json b/locales/bg-BG/modelProvider.json
index b09065aad0602..6b12fab47d9e0 100644
--- a/locales/bg-BG/modelProvider.json
+++ b/locales/bg-BG/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace токен"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Въведете своя токен за достъп до ModelScope, кликнете [тук](https://www.modelscope.cn/my/myaccesstoken), за да го получите",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Токен за достъп до ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Тестване дали адресът на прокси е попълнен правилно",
diff --git a/locales/de-DE/modelProvider.json b/locales/de-DE/modelProvider.json
index a5c7d2d2db60f..59f8c11e7ba5d 100644
--- a/locales/de-DE/modelProvider.json
+++ b/locales/de-DE/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace-Token"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Geben Sie Ihren ModelScope-Zugriffstoken ein. Klicken Sie [hier](https://www.modelscope.cn/my/myaccesstoken), um ihn zu erhalten.",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope-Zugriffstoken"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Testen Sie, ob die Proxy-Adresse korrekt eingetragen wurde",
diff --git a/locales/en-US/modelProvider.json b/locales/en-US/modelProvider.json
index 3b6a69b944bf0..3f624f6a4da93 100644
--- a/locales/en-US/modelProvider.json
+++ b/locales/en-US/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace Token"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Enter your ModelScope access token, click [here](https://www.modelscope.cn/my/myaccesstoken) to obtain it",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope Access Token"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Test if the proxy address is correctly filled in",
diff --git a/locales/es-ES/modelProvider.json b/locales/es-ES/modelProvider.json
index a6063cd7a421a..569f36a6bc8b0 100644
--- a/locales/es-ES/modelProvider.json
+++ b/locales/es-ES/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "Token de HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Introduce tu token de acceso de ModelScope, haz clic [aquí](https://www.modelscope.cn/my/myaccesstoken) para obtenerlo",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Token de acceso de ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Prueba si la dirección del proxy de la interfaz se ha introducido correctamente",
diff --git a/locales/fa-IR/modelProvider.json b/locales/fa-IR/modelProvider.json
index 0aa5746e95127..b3e3104f693bd 100644
--- a/locales/fa-IR/modelProvider.json
+++ b/locales/fa-IR/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "توکن HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "توکن دسترسی ModelScope خود را وارد کنید، برای دریافت آن [اینجا](https://www.modelscope.cn/my/myaccesstoken) را کلیک کنید",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "توکن دسترسی ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "آزمایش کنید که آیا آدرس پروکسی به درستی وارد شده است",
diff --git a/locales/fr-FR/modelProvider.json b/locales/fr-FR/modelProvider.json
index ddae2ef19bbfe..0f0cb2f2d7761 100644
--- a/locales/fr-FR/modelProvider.json
+++ b/locales/fr-FR/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "Jeton HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Entrez votre jeton d'accès ModelScope, cliquez [ici](https://www.modelscope.cn/my/myaccesstoken) pour l'obtenir",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Jeton d'accès ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Vérifiez si l'adresse du proxy est correctement saisie",
diff --git a/locales/it-IT/modelProvider.json b/locales/it-IT/modelProvider.json
index 0cf9b5bd45e14..5552cbde467c6 100644
--- a/locales/it-IT/modelProvider.json
+++ b/locales/it-IT/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "Token HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Inserisci il tuo token di accesso ModelScope, clicca [qui](https://www.modelscope.cn/my/myaccesstoken) per ottenerlo",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Token di accesso ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Verifica se l'indirizzo del proxy è stato compilato correttamente",
diff --git a/locales/ja-JP/modelProvider.json b/locales/ja-JP/modelProvider.json
index 6c04830f81618..6d10b739cdf8a 100644
--- a/locales/ja-JP/modelProvider.json
+++ b/locales/ja-JP/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace トークン"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "ModelScope アクセストークンを入力してください。取得するには、[こちら](https://www.modelscope.cn/my/myaccesstoken) をクリックしてください",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope アクセストークン"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "プロキシアドレスが正しく入力されているかをテストします",
diff --git a/locales/ko-KR/modelProvider.json b/locales/ko-KR/modelProvider.json
index c75dd0cce9674..a4d16911daca1 100644
--- a/locales/ko-KR/modelProvider.json
+++ b/locales/ko-KR/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace 토큰"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "ModelScope 액세스 토큰을 입력하세요. 토큰을 얻으려면 [여기](https://www.modelscope.cn/my/myaccesstoken)를 클릭하세요",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope 액세스 토큰"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "프록시 주소가 올바르게 입력되었는지 테스트합니다",
diff --git a/locales/nl-NL/modelProvider.json b/locales/nl-NL/modelProvider.json
index 1ac1b568cfde1..a17d55a1f4cce 100644
--- a/locales/nl-NL/modelProvider.json
+++ b/locales/nl-NL/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace Token"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Voer je ModelScope toegangstoken in, klik [hier](https://www.modelscope.cn/my/myaccesstoken) om het te verkrijgen",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope Toegangstoken"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Test of het proxyadres correct is ingevuld",
diff --git a/locales/pl-PL/modelProvider.json b/locales/pl-PL/modelProvider.json
index 58a538f925c4b..b3ec26a520d32 100644
--- a/locales/pl-PL/modelProvider.json
+++ b/locales/pl-PL/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "Token HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Wprowadź swój token dostępu ModelScope, kliknij [tutaj](https://www.modelscope.cn/my/myaccesstoken), aby uzyskać",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Token dostępu ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Test czy adres proxy jest poprawnie wypełniony",
diff --git a/locales/pt-BR/modelProvider.json b/locales/pt-BR/modelProvider.json
index 3b2f70b2ced1b..5904a5a130738 100644
--- a/locales/pt-BR/modelProvider.json
+++ b/locales/pt-BR/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "Token do HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Insira seu token de acesso do ModelScope, clique [aqui](https://www.modelscope.cn/my/myaccesstoken) para obter",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Token de Acesso do ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Teste se o endereço do proxy está corretamente preenchido",
diff --git a/locales/ru-RU/modelProvider.json b/locales/ru-RU/modelProvider.json
index e8b6e963cf007..4be23fcbb24e1 100644
--- a/locales/ru-RU/modelProvider.json
+++ b/locales/ru-RU/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "Токен HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Введите токен доступа ModelScope, нажмите [здесь](https://www.modelscope.cn/my/myaccesstoken), чтобы получить",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Токен доступа ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Проверить правильность адреса прокси",
diff --git a/locales/tr-TR/modelProvider.json b/locales/tr-TR/modelProvider.json
index 2efb5b416f915..99b7c63a9dc83 100644
--- a/locales/tr-TR/modelProvider.json
+++ b/locales/tr-TR/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace Token"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "ModelScope erişim belirtecini girin, almak için [buraya](https://www.modelscope.cn/my/myaccesstoken) tıklayın",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope Erişim Belirteci"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Proxy adresinin doğru girilip girilmediğini test edin",
diff --git a/locales/vi-VN/modelProvider.json b/locales/vi-VN/modelProvider.json
index f899b34615390..bb9ce4c3bf45e 100644
--- a/locales/vi-VN/modelProvider.json
+++ b/locales/vi-VN/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "Mã thông báo HuggingFace"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "Nhập mã thông báo truy cập ModelScope của bạn, nhấp vào [đây](https://www.modelscope.cn/my/myaccesstoken) để lấy",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "Mã thông báo truy cập ModelScope"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "Kiểm tra địa chỉ proxy có được nhập chính xác không",
diff --git a/locales/zh-CN/modelProvider.json b/locales/zh-CN/modelProvider.json
index d205de220f5f5..567a92072e93f 100644
--- a/locales/zh-CN/modelProvider.json
+++ b/locales/zh-CN/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace Token"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "填入你的 ModelScope 访问令牌,点击 [这里](https://www.modelscope.cn/my/myaccesstoken) 获取",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope 访问令牌"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "测试代理地址是否正确填写",
diff --git a/locales/zh-TW/modelProvider.json b/locales/zh-TW/modelProvider.json
index 6afea77a200ed..3ba8a3e5784bc 100644
--- a/locales/zh-TW/modelProvider.json
+++ b/locales/zh-TW/modelProvider.json
@@ -77,6 +77,13 @@
       "title": "HuggingFace Token"
     }
   },
+  "modelscope": {
+    "accessToken": {
+      "desc": "填入你的 ModelScope 訪問令牌,點擊 [這裡](https://www.modelscope.cn/my/myaccesstoken) 獲取",
+      "placeholder": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
+      "title": "ModelScope 訪問令牌"
+    }
+  },
   "ollama": {
     "checker": {
       "desc": "測試代理地址是否正確填寫",
diff --git a/src/app/(main)/settings/llm/ProviderList/ModelScope/index.tsx b/src/app/(main)/settings/llm/ProviderList/ModelScope/index.tsx
new file mode 100644
index 0000000000000..001804227df93
--- /dev/null
+++ b/src/app/(main)/settings/llm/ProviderList/ModelScope/index.tsx
@@ -0,0 +1,53 @@
+'use client';
+
+import { Markdown } from '@lobehub/ui';
+import { Input } from 'antd';
+import { createStyles } from 'antd-style';
+import { useTranslation } from 'react-i18next';
+
+import { ModelScopeProviderCard } from '@/config/modelProviders';
+import { GlobalLLMProviderKey } from '@/types/user/settings';
+
+import { KeyVaultsConfigKey, LLMProviderApiTokenKey } from '../../const';
+import { ProviderItem } from '../../type';
+
+const useStyles = createStyles(({ css, token }) => ({
+  markdown: css`
+    p {
+      color: ${token.colorTextDescription} !important;
+    }
+  `,
+  tip: css`
+    font-size: 12px;
+    color: ${token.colorTextDescription};
+  `,
+}));
+
+const providerKey: GlobalLLMProviderKey = 'modelscope';
+
+// Same as OpenAIProvider, but replace API Key with ModelScope Access Token
+export const useModelScopeProvider = (): ProviderItem => {
+  const { t } = useTranslation('modelProvider');
+  const { styles } = useStyles();
+
+  return {
+    ...ModelScopeProviderCard,
+    apiKeyItems: [
+      {
+        children: (
+          <Input.Password
+            autoComplete={'new-password'}
+            placeholder={t(`${providerKey}.accessToken.placeholder`)}
+          />
+        ),
+        desc: (
+          <Markdown className={styles.markdown} fontSize={12} variant={'chat'}>
+            {t(`${providerKey}.accessToken.desc`)}
+          </Markdown>
+        ),
+        label: t(`${providerKey}.accessToken.title`),
+        name: [KeyVaultsConfigKey, providerKey, LLMProviderApiTokenKey],
+      },
+    ],
+  };
+};
diff --git a/src/app/(main)/settings/llm/ProviderList/providers.tsx b/src/app/(main)/settings/llm/ProviderList/providers.tsx
index f108683bfa07f..cf664d3ca3af3 100644
--- a/src/app/(main)/settings/llm/ProviderList/providers.tsx
+++ b/src/app/(main)/settings/llm/ProviderList/providers.tsx
@@ -15,6 +15,7 @@ import {
   InternLMProviderCard,
   MinimaxProviderCard,
   MistralProviderCard,
+  ModelScopeProviderCard,
   MoonshotProviderCard,
   NovitaProviderCard,
   OpenRouterProviderCard,
@@ -38,6 +39,7 @@ import { useBedrockProvider } from './Bedrock';
 import { useCloudflareProvider } from './Cloudflare';
 import { useGithubProvider } from './Github';
 import { useHuggingFaceProvider } from './HuggingFace';
+import { useModelScopeProvider } from './ModelScope';
 import { useOllamaProvider } from './Ollama';
 import { useOpenAIProvider } from './OpenAI';
 import { useWenxinProvider } from './Wenxin';
@@ -51,6 +53,7 @@ export const useProviderList = (): ProviderItem[] => {
   const GithubProvider = useGithubProvider();
   const HuggingFaceProvider = useHuggingFaceProvider();
   const WenxinProvider = useWenxinProvider();
+  const ModelScopeProvider = useModelScopeProvider();
 
   return useMemo(
     () => [
@@ -91,6 +94,7 @@ export const useProviderList = (): ProviderItem[] => {
       SiliconCloudProviderCard,
       HigressProviderCard,
       GiteeAIProviderCard,
+      ModelScopeProviderCard,
     ],
     [
       AzureProvider,
@@ -101,6 +105,7 @@ export const useProviderList = (): ProviderItem[] => {
       GithubProvider,
       WenxinProvider,
       HuggingFaceProvider,
+      ModelScopeProvider,
     ],
   );
 };
diff --git a/src/config/llm.ts b/src/config/llm.ts
index 89b0ac242638c..9f8c04a38386d 100644
--- a/src/config/llm.ts
+++ b/src/config/llm.ts
@@ -123,6 +123,9 @@ export const getLLMConfig = () => {
 
       ENABLED_HIGRESS: z.boolean(),
       HIGRESS_API_KEY: z.string().optional(),
+
+      ENABLED_MODELSCOPE: z.boolean(),
+      MODELSCOPE_API_KEY: z.string().optional(),
     },
     runtimeEnv: {
       API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
@@ -244,6 +247,9 @@ export const getLLMConfig = () => {
 
       ENABLED_HIGRESS: !!process.env.HIGRESS_API_KEY,
       HIGRESS_API_KEY: process.env.HIGRESS_API_KEY,
+
+      ENABLED_MODELSCOPE: !!process.env.MODELSCOPE_API_KEY,
+      MODELSCOPE_API_KEY: process.env.MODELSCOPE_API_KEY,
     },
   });
 };
diff --git a/src/config/modelProviders/index.ts b/src/config/modelProviders/index.ts
index b4a77da4777a2..4841b33a17300 100644
--- a/src/config/modelProviders/index.ts
+++ b/src/config/modelProviders/index.ts
@@ -19,6 +19,7 @@ import HunyuanProvider from './hunyuan';
 import InternLMProvider from './internlm';
 import MinimaxProvider from './minimax';
 import MistralProvider from './mistral';
+import ModelScopeProvider from './modelscope';
 import MoonshotProvider from './moonshot';
 import NovitaProvider from './novita';
 import OllamaProvider from './ollama';
@@ -49,6 +50,7 @@ export const LOBE_DEFAULT_MODEL_LIST: ChatModelCard[] = [
   GithubProvider.chatModels,
   MinimaxProvider.chatModels,
   MistralProvider.chatModels,
+  ModelScopeProvider.chatModels,
   MoonshotProvider.chatModels,
   OllamaProvider.chatModels,
   OpenRouterProvider.chatModels,
@@ -106,6 +108,7 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
   ZeroOneProvider,
   SenseNovaProvider,
   StepfunProvider,
+  ModelScopeProvider,
   MoonshotProvider,
   BaichuanProvider,
   MinimaxProvider,
@@ -145,6 +148,7 @@ export { default as HunyuanProviderCard } from './hunyuan';
 export { default as InternLMProviderCard } from './internlm';
 export { default as MinimaxProviderCard } from './minimax';
 export { default as MistralProviderCard } from './mistral';
+export { default as ModelScopeProviderCard } from './modelscope';
 export { default as MoonshotProviderCard } from './moonshot';
 export { default as NovitaProviderCard } from './novita';
 export { default as OllamaProviderCard } from './ollama';
diff --git a/src/config/modelProviders/modelscope.ts b/src/config/modelProviders/modelscope.ts
new file mode 100644
index 0000000000000..4b362ccfcaac2
--- /dev/null
+++ b/src/config/modelProviders/modelscope.ts
@@ -0,0 +1,80 @@
+import { ModelProviderCard } from '@/types/llm';
+
+const ModelScope: ModelProviderCard = {
+  chatModels: [
+    {
+      contextWindowTokens: 32_768,
+      description: '阿里云通义千问团队开发的大型语言模型',
+      displayName: 'Qwen 2.5 7B Instruct',
+      id: 'Qwen/Qwen2.5-7B-Instruct',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: '阿里云通义千问团队开发的大型语言模型',
+      displayName: 'Qwen 2.5 14B Instruct',
+      id: 'Qwen/Qwen2.5-14B-Instruct',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: '阿里云通义千问团队开发的大型语言模型',
+      displayName: 'Qwen 2.5 32B Instruct',
+      id: 'Qwen/Qwen2.5-32B-Instruct',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: '阿里云通义千问团队开发的大型语言模型',
+      displayName: 'Qwen 2.5 72B Instruct',
+      id: 'Qwen/Qwen2.5-72B-Instruct',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: 'Qwen2.5-Coder 专注于代码编写',
+      displayName: 'Qwen 2.5 Coder 7B Instruct',
+      id: 'Qwen/Qwen2.5-Coder-7B-Instruct',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: 'Qwen2.5-Coder 专注于代码编写',
+      displayName: 'Qwen 2.5 Coder 14B Instruct',
+      id: 'Qwen/Qwen2.5-Coder-14B-Instruct',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: 'Qwen2.5-Coder 专注于代码编写',
+      displayName: 'Qwen 2.5 Coder 32B Instruct',
+      id: 'Qwen/Qwen2.5-Coder-32B-Instruct',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: 'Qwen QwQ 是由 Qwen 团队开发的实验研究模型,专注于提升AI推理能力。',
+      displayName: 'QwQ 32B Preview',
+      id: 'Qwen/QwQ-32B-Preview',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: 'Qwen QwQ 是由 Qwen 团队开发的实验研究模型,专注于提升AI推理能力。',
+      displayName: 'QwQ 72B Preview',
+      id: 'Qwen/QwQ-72B-Preview',
+    },
+    {
+      contextWindowTokens: 32_768,
+      description: 'Llama 是由 Meta 团队开发的大型语言模型',
+      displayName: 'Llama 3.3 70B Instruct',
+      id: 'LLM-Research/Llama-3.3-70B-Instruct',
+    },
+  ],
+  checkModel: 'Qwen/Qwen2.5-32B-Instruct',
+  description:
+    'ModelScope Inference API 提供了一种快速且免费的方式,让您可以探索成千上万种模型,适用于各种任务。无论您是在为新应用程序进行原型设计,还是在尝试机器学习的功能,这个 API 都能让您即时访问多个领域的高性能模型。',
+  disableBrowserRequest: true,
+  id: 'modelscope',
+  modelList: { showModelFetcher: true },
+  modelsUrl: 'https://modelscope.cn/docs/model-service/API-Inference/intro',
+  name: 'ModelScope',
+  proxyUrl: {
+    placeholder: 'https://api-inference.modelscope.cn/v1',
+  },
+  url: 'https://modelscope.cn',
+};
+
+export default ModelScope;
diff --git a/src/libs/agent-runtime/AgentRuntime.ts b/src/libs/agent-runtime/AgentRuntime.ts
index c70c5d4c153e8..2102772b79b36 100644
--- a/src/libs/agent-runtime/AgentRuntime.ts
+++ b/src/libs/agent-runtime/AgentRuntime.ts
@@ -22,6 +22,7 @@ import { LobeHunyuanAI } from './hunyuan';
 import { LobeInternLMAI } from './internlm';
 import { LobeMinimaxAI } from './minimax';
 import { LobeMistralAI } from './mistral';
+import { LobeModelScopeAI } from './modelscope';
 import { LobeMoonshotAI } from './moonshot';
 import { LobeNovitaAI } from './novita';
 import { LobeOllamaAI } from './ollama';
@@ -149,6 +150,7 @@ class AgentRuntime {
       internlm: Partial<ClientOptions>;
       minimax: Partial<ClientOptions>;
       mistral: Partial<ClientOptions>;
+      modelscope: Partial<ClientOptions>;
       moonshot: Partial<ClientOptions>;
       novita: Partial<ClientOptions>;
       ollama: Partial<ClientOptions>;
@@ -242,6 +244,11 @@ class AgentRuntime {
         break;
       }
 
+      case ModelProvider.ModelScope: {
+        runtimeModel = new LobeModelScopeAI(params.modelscope);
+        break;
+      }
+
       case ModelProvider.Groq: {
         runtimeModel = new LobeGroq(params.groq);
         break;
diff --git a/src/libs/agent-runtime/modelscope/index.ts b/src/libs/agent-runtime/modelscope/index.ts
new file mode 100644
index 0000000000000..7ee9b26898e21
--- /dev/null
+++ b/src/libs/agent-runtime/modelscope/index.ts
@@ -0,0 +1,10 @@
+import { ModelProvider } from '../types';
+import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
+
+export const LobeModelScopeAI = LobeOpenAICompatibleFactory({
+  baseURL: 'https://api-inference.modelscope.cn/v1',
+  debug: {
+    chatCompletion: () => process.env.DEBUG_MODELSCOPE_CHAT_COMPLETION === '1',
+  },
+  provider: ModelProvider.ModelScope,
+});
diff --git a/src/libs/agent-runtime/types/type.ts b/src/libs/agent-runtime/types/type.ts
index 7c0af8abc53ff..d65980518d9e6 100644
--- a/src/libs/agent-runtime/types/type.ts
+++ b/src/libs/agent-runtime/types/type.ts
@@ -41,6 +41,7 @@ export enum ModelProvider {
   InternLM = 'internlm',
   Minimax = 'minimax',
   Mistral = 'mistral',
+  ModelScope = 'modelscope',
   Moonshot = 'moonshot',
   Novita = 'novita',
   Ollama = 'ollama',
diff --git a/src/locales/default/modelProvider.ts b/src/locales/default/modelProvider.ts
index fdb0555e41bbd..a56e1091aff14 100644
--- a/src/locales/default/modelProvider.ts
+++ b/src/locales/default/modelProvider.ts
@@ -78,6 +78,13 @@ export default {
       title: 'HuggingFace Token',
     },
   },
+  modelscope: {
+    accessToken: {
+      desc: '填入你的 ModelScope 访问令牌,点击 [这里](https://www.modelscope.cn/my/myaccesstoken) 获取',
+      placeholder: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx',
+      title: 'ModelScope 访问令牌',
+    },
+  },
   ollama: {
     checker: {
       desc: '测试代理地址是否正确填写',
diff --git a/src/types/user/settings/keyVaults.ts b/src/types/user/settings/keyVaults.ts
index c7dfc030b2eda..0e706269613ab 100644
--- a/src/types/user/settings/keyVaults.ts
+++ b/src/types/user/settings/keyVaults.ts
@@ -47,6 +47,7 @@ export interface UserKeyVaults {
   lobehub?: any;
   minimax?: OpenAICompatibleKeyVault;
   mistral?: OpenAICompatibleKeyVault;
+  modelscope?: OpenAICompatibleKeyVault;
   moonshot?: OpenAICompatibleKeyVault;
   novita?: OpenAICompatibleKeyVault;
   ollama?: OpenAICompatibleKeyVault;