Merge remote-tracking branch 'upstream/main'
bentwnghk committed Feb 5, 2025
2 parents 365a618 + f899c82 commit 78f1168
Showing 34 changed files with 226 additions and 493 deletions.
494 changes: 5 additions & 489 deletions changelog/v1.json

Large diffs are not rendered by default.

4 changes: 4 additions & 0 deletions locales/ar/modelProvider.json
@@ -229,6 +229,10 @@
"title": "معرف النموذج"
},
"modalTitle": "تكوين النموذج المخصص",
"reasoning": {
"extra": "هذا الإعداد سيفتح فقط قدرة النموذج على التفكير العميق، التأثير الفعلي يعتمد بالكامل على النموذج نفسه، يرجى اختبار ما إذا كان هذا النموذج يمتلك القدرة على التفكير العميق القابل للاستخدام",
"title": "يدعم التفكير العميق"
},
"tokens": {
"extra": "تعيين الحد الأقصى لعدد الرموز المدعومة من قبل النموذج",
"title": "أقصى نافذة سياق",
4 changes: 4 additions & 0 deletions locales/bg-BG/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID на модела"
},
"modalTitle": "Конфигурация на персонализиран модел",
"reasoning": {
"extra": "Тази конфигурация ще активира само способността на модела за дълбоко мислене, конкретният ефект зависи изцяло от самия модел, моля, тествайте сами дали моделът притежава налична способност за дълбоко мислене",
"title": "Поддръжка на дълбоко мислене"
},
"tokens": {
"extra": "Настройте максималния брой токени, поддържани от модела",
"title": "Максимален контекстуален прозорец",
4 changes: 4 additions & 0 deletions locales/de-DE/modelProvider.json
@@ -229,6 +229,10 @@
"title": "Modell-ID"
},
"modalTitle": "Benutzerdefinierte Modellkonfiguration",
"reasoning": {
"extra": "Diese Konfiguration aktiviert nur die Fähigkeit des Modells zu tiefem Denken. Die tatsächlichen Ergebnisse hängen vollständig vom Modell selbst ab. Bitte testen Sie selbst, ob das Modell über die Fähigkeit zum tiefen Denken verfügt.",
"title": "Unterstützung für tiefes Denken"
},
"tokens": {
"extra": "Maximale Token-Anzahl für das Modell festlegen",
"title": "Maximales Kontextfenster",
4 changes: 4 additions & 0 deletions locales/en-US/modelProvider.json
@@ -229,6 +229,10 @@
"title": "Model ID"
},
"modalTitle": "Custom Model Configuration",
"reasoning": {
"extra": "This configuration will enable the model's deep thinking capabilities, and the specific effects depend entirely on the model itself. Please test whether this model has usable deep thinking abilities.",
"title": "Support Deep Thinking"
},
"tokens": {
"extra": "Set the maximum number of tokens supported by the model",
"title": "Maximum Context Window",
4 changes: 4 additions & 0 deletions locales/es-ES/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID del modelo"
},
"modalTitle": "Configuración del modelo personalizado",
"reasoning": {
"extra": "Esta configuración solo activará la capacidad de pensamiento profundo del modelo, el efecto específico depende completamente del modelo en sí, por favor, pruebe si este modelo tiene la capacidad de pensamiento profundo utilizable",
"title": "Soporte para pensamiento profundo"
},
"tokens": {
"extra": "Establecer el número máximo de tokens que el modelo puede soportar",
"title": "Máximo de ventana de contexto",
4 changes: 4 additions & 0 deletions locales/fa-IR/modelProvider.json
@@ -229,6 +229,10 @@
"title": "شناسه مدل"
},
"modalTitle": "پیکربندی مدل سفارشی",
"reasoning": {
"extra": "این تنظیم فقط قابلیت تفکر عمیق مدل را فعال می‌کند و تأثیر دقیق آن کاملاً به خود مدل بستگی دارد، لطفاً خودتان آزمایش کنید که آیا این مدل قابلیت تفکر عمیق قابل استفاده را دارد یا خیر",
"title": "پشتیبانی از تفکر عمیق"
},
"tokens": {
"extra": "حداکثر تعداد توکن‌های پشتیبانی شده توسط مدل را تنظیم کنید",
"title": "حداکثر پنجره زمینه",
4 changes: 4 additions & 0 deletions locales/fr-FR/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID du modèle"
},
"modalTitle": "Configuration du modèle personnalisé",
"reasoning": {
"extra": "Cette configuration activera uniquement la capacité de réflexion approfondie du modèle. Les résultats dépendent entièrement du modèle lui-même, veuillez tester si ce modèle possède une capacité de réflexion approfondie utilisable.",
"title": "Support de la réflexion approfondie"
},
"tokens": {
"extra": "Définir le nombre maximal de tokens pris en charge par le modèle",
"title": "Fenêtre de contexte maximale",
4 changes: 4 additions & 0 deletions locales/it-IT/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID del modello"
},
"modalTitle": "Configurazione modello personalizzato",
"reasoning": {
"extra": "Questa configurazione attiverà solo la capacità di pensiero profondo del modello; l'effetto specifico dipende interamente dal modello stesso. Si prega di testare autonomamente se il modello possiede una capacità di pensiero profondo utilizzabile.",
"title": "Supporto per il pensiero profondo"
},
"tokens": {
"extra": "Imposta il numero massimo di token supportati dal modello",
"title": "Finestra di contesto massima",
4 changes: 4 additions & 0 deletions locales/ja-JP/modelProvider.json
@@ -229,6 +229,10 @@
"title": "モデル ID"
},
"modalTitle": "カスタムモデル設定",
"reasoning": {
"extra": "この設定は、モデルの深い思考能力を有効にするだけです。具体的な効果はモデル自体に依存しますので、このモデルが利用可能な深い思考能力を持っているかどうかはご自身でテストしてください。",
"title": "深い思考をサポート"
},
"tokens": {
"extra": "モデルがサポートする最大トークン数を設定する",
"title": "最大コンテキストウィンドウ",
4 changes: 4 additions & 0 deletions locales/ko-KR/modelProvider.json
@@ -229,6 +229,10 @@
"title": "모델 ID"
},
"modalTitle": "사용자 정의 모델 구성",
"reasoning": {
"extra": "이 설정은 모델의 심층 사고 능력만을 활성화합니다. 구체적인 효과는 모델 자체에 따라 다르므로, 해당 모델이 사용 가능한 심층 사고 능력을 갖추고 있는지 직접 테스트해 보시기 바랍니다.",
"title": "심층 사고 지원"
},
"tokens": {
"extra": "모델이 지원하는 최대 토큰 수 설정",
"title": "최대 컨텍스트 창",
4 changes: 4 additions & 0 deletions locales/nl-NL/modelProvider.json
@@ -229,6 +229,10 @@
"title": "Model ID"
},
"modalTitle": "Configuratie van aangepast model",
"reasoning": {
"extra": "Deze configuratie schakelt alleen de mogelijkheid voor diepgaand denken van het model in. Het specifieke effect hangt volledig af van het model zelf, test zelf of dit model in staat is tot bruikbaar diepgaand denken.",
"title": "Ondersteuning voor diepgaand denken"
},
"tokens": {
"extra": "Stel het maximale aantal tokens in dat door het model wordt ondersteund",
"title": "Maximale contextvenster",
4 changes: 4 additions & 0 deletions locales/pl-PL/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID modelu"
},
"modalTitle": "Konfiguracja niestandardowego modelu",
"reasoning": {
"extra": "Ta konfiguracja włączy jedynie zdolność modelu do głębokiego myślenia, a konkretne efekty w pełni zależą od samego modelu. Proszę samodzielnie przetestować, czy model ma zdolność do głębokiego myślenia.",
"title": "Wsparcie dla głębokiego myślenia"
},
"tokens": {
"extra": "Ustaw maksymalną liczbę tokenów wspieranych przez model",
"title": "Maksymalne okno kontekstu",
4 changes: 4 additions & 0 deletions locales/pt-BR/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID do Modelo"
},
"modalTitle": "Configuração do Modelo Personalizado",
"reasoning": {
"extra": "Esta configuração ativará apenas a capacidade de pensamento profundo do modelo, e o efeito específico depende totalmente do próprio modelo. Por favor, teste se este modelo possui a capacidade de pensamento profundo utilizável.",
"title": "Suporte a Pensamento Profundo"
},
"tokens": {
"extra": "Configurar o número máximo de tokens suportados pelo modelo",
"title": "Janela de contexto máxima",
4 changes: 4 additions & 0 deletions locales/ru-RU/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID модели"
},
"modalTitle": "Настройка пользовательской модели",
"reasoning": {
"extra": "Эта настройка активирует возможность глубокого мышления модели, конкретный эффект полностью зависит от самой модели, пожалуйста, протестируйте, обладает ли модель доступной способностью к глубокому мышлению",
"title": "Поддержка глубокого мышления"
},
"tokens": {
"extra": "Установите максимальное количество токенов, поддерживаемое моделью",
"title": "Максимальное окно контекста",
4 changes: 4 additions & 0 deletions locales/tr-TR/modelProvider.json
@@ -229,6 +229,10 @@
"title": "Model ID"
},
"modalTitle": "Özel Model Yapılandırması",
"reasoning": {
"extra": "Bu yapılandırma yalnızca modelin derin düşünme yeteneğini açacaktır, belirli etkiler tamamen modelin kendisine bağlıdır, lütfen bu modelin kullanılabilir derin düşünme yeteneğine sahip olup olmadığını kendiniz test edin",
"title": "Derin düşünmeyi destekler"
},
"tokens": {
"extra": "Modelin desteklediği maksimum Token sayısını ayarlayın",
"title": "Maksimum bağlam penceresi",
4 changes: 4 additions & 0 deletions locales/vi-VN/modelProvider.json
@@ -229,6 +229,10 @@
"title": "ID mô hình"
},
"modalTitle": "Cấu hình mô hình tùy chỉnh",
"reasoning": {
"extra": "Cấu hình này sẽ chỉ kích hoạt khả năng suy nghĩ sâu của mô hình, hiệu quả cụ thể hoàn toàn phụ thuộc vào chính mô hình, vui lòng tự kiểm tra xem mô hình này có khả năng suy nghĩ sâu có thể sử dụng hay không",
"title": "Hỗ trợ suy nghĩ sâu"
},
"tokens": {
"extra": "Cài đặt số Token tối đa mà mô hình hỗ trợ",
"title": "Cửa sổ ngữ cảnh tối đa",
4 changes: 4 additions & 0 deletions locales/zh-CN/modelProvider.json
@@ -229,6 +229,10 @@
"title": "模型 ID"
},
"modalTitle": "自定义模型配置",
"reasoning": {
"extra": "此配置将仅开启模型深度思考的能力,具体效果完全取决于模型本身,请自行测试该模型是否具备可用的深度思考能力",
"title": "支持深度思考"
},
"tokens": {
"extra": "设置模型支持的最大 Token 数",
"title": "最大上下文窗口",
4 changes: 4 additions & 0 deletions locales/zh-TW/modelProvider.json
@@ -229,6 +229,10 @@
"title": "模型 ID"
},
"modalTitle": "自定義模型配置",
"reasoning": {
"extra": "此配置將僅開啟模型深度思考的能力,具體效果完全取決於模型本身,請自行測試該模型是否具備可用的深度思考能力",
"title": "支持深度思考"
},
"tokens": {
"extra": "設定模型支持的最大 Token 數",
"title": "最大上下文窗口",
@@ -95,6 +95,14 @@ const ModelConfigForm = memo<ModelConfigFormProps>(
>
<Checkbox />
</Form.Item>
<Form.Item
extra={t('providerModels.item.modelConfig.reasoning.extra')}
label={t('providerModels.item.modelConfig.reasoning.title')}
name={['abilities', 'reasoning']}
valuePropName={'checked'}
>
<Checkbox />
</Form.Item>
{/*<Form.Item*/}
{/* extra={t('providerModels.item.modelConfig.files.extra')}*/}
{/* label={t('providerModels.item.modelConfig.files.title')}*/}
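The new checkbox mirrors the existing ability toggles: with name={['abilities', 'reasoning']} and valuePropName="checked", the flag ends up nested under the abilities branch of the form values. A minimal sketch of the resulting payload, with illustrative values (this object is not part of the commit):

// Hypothetical values emitted by the form when the reasoning checkbox is ticked.
const formValues = {
  abilities: {
    functionCall: false,
    reasoning: true, // written by the Form.Item added above
    vision: false,
  },
  displayName: 'DeepSeek R1',
  id: 'deepseek-r1',
};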
15 changes: 15 additions & 0 deletions src/hooks/useModelSupportReasoning.ts
@@ -0,0 +1,15 @@
import { isDeprecatedEdition } from '@/const/version';
import { aiModelSelectors, useAiInfraStore } from '@/store/aiInfra';
import { useUserStore } from '@/store/user';
import { modelProviderSelectors } from '@/store/user/selectors';

export const useModelSupportReasoning = (model: string, provider: string) => {
  const newValue = useAiInfraStore(aiModelSelectors.isModelSupportReasoning(model, provider));

  // TODO: remove this in V2.0
  const oldValue = useUserStore(modelProviderSelectors.isModelEnabledReasoning(model));
  if (isDeprecatedEdition) return oldValue;
  //

  return newValue;
};
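A hedged usage sketch of the new hook; the component, its props, and the rendered text are illustrative and not part of this commit:

// Hypothetical consumer of useModelSupportReasoning.
import { memo } from 'react';

import { useModelSupportReasoning } from '@/hooks/useModelSupportReasoning';

const ReasoningTag = memo<{ model: string; provider: string }>(({ model, provider }) => {
  // Reads the aiInfra store; on deprecated editions the hook falls back to the legacy user store.
  const supportsReasoning = useModelSupportReasoning(model, provider);

  if (!supportsReasoning) return null;

  return <span>deep thinking</span>;
});

export default ReasoningTag;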
5 changes: 3 additions & 2 deletions src/libs/agent-runtime/siliconcloud/index.ts
@@ -57,14 +57,15 @@ export const LobeSiliconCloudAI = LobeOpenAICompatibleFactory({
const functionCallKeywords = [
'qwen/qwen2.5',
'thudm/glm-4',
'deepseek-ai/deepSeek',
'deepseek-ai/deepseek',
'internlm/internlm2_5',
'meta-llama/meta-llama-3.1',
'meta-llama/meta-llama-3.3',
];

const visionKeywords = [
'opengvlab/internvl',
'qwen/qvq',
'qwen/qwen2-vl',
'teleai/telemm',
'deepseek-ai/deepseek-vl',
@@ -74,7 +75,7 @@ export const LobeSiliconCloudAI = LobeOpenAICompatibleFactory({

return {
enabled: LOBE_DEFAULT_MODEL_LIST.find((m) => model.id.endsWith(m.id))?.enabled || false,
functionCall: functionCallKeywords.some(keyword => model.id.toLowerCase().includes(keyword)),
functionCall: functionCallKeywords.some(keyword => model.id.toLowerCase().includes(keyword)) && !model.id.toLowerCase().includes('deepseek-r1'),
id: model.id,
vision: visionKeywords.some(keyword => model.id.toLowerCase().includes(keyword)),
};
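Two fixes land in the SiliconCloud model transform: the function-call keyword 'deepseek-ai/deepSeek' is lower-cased so it can actually match the toLowerCase()-ed model id, and DeepSeek R1 is excluded from function calling even though the keyword list now matches it. A standalone sketch of the resulting rule, with the keyword list trimmed for brevity:

// Trimmed illustration of the predicate above; not the full keyword list.
const functionCallKeywords = ['deepseek-ai/deepseek', 'qwen/qwen2.5', 'thudm/glm-4'];

const supportsFunctionCall = (modelId: string): boolean => {
  const id = modelId.toLowerCase();
  return functionCallKeywords.some((keyword) => id.includes(keyword)) && !id.includes('deepseek-r1');
};

supportsFunctionCall('deepseek-ai/DeepSeek-V3'); // true
supportsFunctionCall('deepseek-ai/DeepSeek-R1'); // false: treated as not tool-capable here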
5 changes: 5 additions & 0 deletions src/locales/default/modelProvider.ts
@@ -231,6 +231,11 @@ export default {
title: '模型 ID',
},
modalTitle: '自定义模型配置',
reasoning: {
extra:
'此配置将仅开启模型深度思考的能力,具体效果完全取决于模型本身,请自行测试该模型是否具备可用的深度思考能力',
title: '支持深度思考',
},
tokens: {
extra: '设置模型支持的最大 Token 数',
title: '最大上下文窗口',
1 change: 1 addition & 0 deletions src/store/aiInfra/slices/aiModel/action.ts
@@ -74,6 +74,7 @@ export const createAiModelSlice: StateCreator<
abilities: {
files: model.files,
functionCall: model.functionCall,
reasoning: model.reasoning,
vision: model.vision,
},
enabled: model.enabled || false,
7 changes: 7 additions & 0 deletions src/store/aiInfra/slices/aiModel/selectors.ts
@@ -48,6 +48,12 @@ const isModelSupportVision = (id: string, provider: string) => (s: AIProviderSto
return model?.abilities?.vision;
};

const isModelSupportReasoning = (id: string, provider: string) => (s: AIProviderStoreState) => {
const model = getEnabledModelById(id, provider)(s);

return model?.abilities?.reasoning;
};

const isModelHasContextWindowToken =
(id: string, provider: string) => (s: AIProviderStoreState) => {
const model = getEnabledModelById(id, provider)(s);
@@ -71,6 +77,7 @@ export const aiModelSelectors = {
isModelEnabled,
isModelHasContextWindowToken,
isModelLoading,
isModelSupportReasoning,
isModelSupportToolUse,
isModelSupportVision,
modelContextWindowTokens,
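Outside React components, the same check can presumably be made against the store state directly; the model and provider ids below are examples only:

import { aiModelSelectors, useAiInfraStore } from '@/store/aiInfra';

// Curried selector: (id, provider) => (state) => boolean | undefined
const supportsReasoning = aiModelSelectors.isModelSupportReasoning(
  'deepseek-r1',
  'deepseek',
)(useAiInfraStore.getState());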
4 changes: 4 additions & 0 deletions src/store/user/slices/modelList/selectors/modelProvider.ts
@@ -122,6 +122,9 @@ const isModelEnabledFunctionCall = (id: string) => (s: UserStore) =>
const isModelEnabledVision = (id: string) => (s: UserStore) =>
getModelCardById(id)(s)?.vision || id.includes('vision');

const isModelEnabledReasoning = (id: string) => (s: UserStore) =>
getModelCardById(id)(s)?.reasoning || false;

const isModelEnabledFiles = (id: string) => (s: UserStore) => getModelCardById(id)(s)?.files;

const isModelEnabledUpload = (id: string) => (s: UserStore) =>
@@ -144,6 +147,7 @@ export const modelProviderSelectors = {
getModelCardsById,
isModelEnabledFiles,
isModelEnabledFunctionCall,
isModelEnabledReasoning,
isModelEnabledUpload,
isModelEnabledVision,
isModelHasMaxToken,
5 changes: 5 additions & 0 deletions src/types/aiModel.ts
@@ -43,6 +43,7 @@ export interface ModelAbilities {
const AiModelAbilitiesSchema = z.object({
// files: z.boolean().optional(),
functionCall: z.boolean().optional(),
reasoning: z.boolean().optional(),
vision: z.boolean().optional(),
});

@@ -205,6 +206,10 @@ export interface AIRealtimeModelCard extends AIBaseModelCard {
* whether model supports function call
*/
functionCall?: boolean;
/**
* whether model supports reasoning
*/
reasoning?: boolean;
/**
* whether model supports vision
*/
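With the abilities schema extended, a model entry can declare reasoning next to functionCall and vision. An illustrative abilities object, assuming the interface fields are optional as in the zod schema (the values are examples, not shipped defaults):

import type { ModelAbilities } from '@/types/aiModel';

// Example abilities block for a reasoning-capable chat model.
const r1Abilities: ModelAbilities = {
  functionCall: false,
  reasoning: true,
  vision: false,
};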
9 changes: 9 additions & 0 deletions src/types/llm.ts
@@ -42,6 +42,15 @@ export interface ChatModelCard {
legacy?: boolean;
maxOutput?: number;
pricing?: ChatModelPricing;

/**
* whether model supports reasoning
*/
reasoning?: boolean;

/**
* the date when the model was released
*/
releasedAt?: string;

/**
11 changes: 11 additions & 0 deletions src/utils/_deprecated/parseModels.test.ts
@@ -52,6 +52,17 @@ describe('parseModelString', () => {
});
});

it('token and reasoning', () => {
const result = parseModelString('deepseek-r1=Deepseek R1<65536:reasoning>');

expect(result.add[0]).toEqual({
displayName: 'Deepseek R1',
reasoning: true,
id: 'deepseek-r1',
contextWindowTokens: 65_536,
});
});

it('multi models', () => {
const result = parseModelString(
'gemini-1.5-flash-latest=Gemini 1.5 Flash<16000:vision>,gpt-4-all=ChatGPT Plus<128000:fc:vision:file>',
4 changes: 4 additions & 0 deletions src/utils/_deprecated/parseModels.ts
@@ -60,6 +60,10 @@ export const parseModelString = (modelString: string = '', withDeploymentName =

for (const capability of capabilityList) {
switch (capability) {
case 'reasoning': {
model.reasoning = true;
break;
}
case 'vision': {
model.vision = true;
break;
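With the new switch case, the deprecated model-string parser accepts reasoning as a capability tag alongside fc, vision and file. A hedged usage sketch mirroring the test above (how the string reaches the parser, for example via a provider's model-list environment variable, is outside this diff):

import { parseModelString } from '@/utils/_deprecated/parseModels';

// '<65536:reasoning:fc>' sets contextWindowTokens and flags reasoning plus function calling.
const { add } = parseModelString('deepseek-r1=DeepSeek R1<65536:reasoning:fc>');

// add[0] is roughly:
// { id: 'deepseek-r1', displayName: 'DeepSeek R1', contextWindowTokens: 65536, reasoning: true, functionCall: true }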