[Chore] Add DeepSeek in Ai Assistant (#2946)
lixun910 authored Jan 28, 2025
1 parent c5484e1 commit f292d61
Showing 6 changed files with 120 additions and 61 deletions.
2 changes: 1 addition & 1 deletion examples/demo-app/package.json
@@ -30,7 +30,7 @@
     "@loaders.gl/csv": "^4.3.2",
     "@loaders.gl/json": "^4.3.2",
     "@loaders.gl/parquet": "^4.3.2",
-    "@openassistant/core": "^0.0.6",
+    "@openassistant/core": "^0.0.7",
     "@openassistant/ui": "^0.0.7",
     "@types/classnames": "^2.3.1",
     "@types/keymirror": "^0.1.1",
10 changes: 5 additions & 5 deletions examples/demo-app/yarn.lock
@@ -4097,9 +4097,9 @@ __metadata:
   languageName: node
   linkType: hard

-"@openassistant/core@npm:^0.0.6":
-  version: 0.0.6
-  resolution: "@openassistant/core@npm:0.0.6"
+"@openassistant/core@npm:^0.0.7":
+  version: 0.0.7
+  resolution: "@openassistant/core@npm:0.0.7"
   dependencies:
     "@langchain/core": "npm:^0.3.26"
     "@langchain/google-genai": "npm:^0.1.6"
@@ -4108,7 +4108,7 @@ __metadata:
     axios: "npm:^1.7.9"
   peerDependencies:
     react: ">=18.2"
-  checksum: 10c0/cf88b2b503f99877820822a6c5f47133ae2d5bbb8ea4b714598833b79ebb30a990e6f4fa8b8ae654a37ad36462db0ddabbf71354ed5049cb1400ea7cfa31a2e0
+  checksum: 10c0/ac9432dbda04528197a8d1d69133054f5c87dbc9706412db47cfbff7e797821e163505b1c45304cb3dba52f4c67f9864b93e2d3c0e285d953676a27ee666a83d
   languageName: node
   linkType: hard

@@ -13018,7 +13018,7 @@ __metadata:
     "@loaders.gl/csv": "npm:^4.3.2"
     "@loaders.gl/json": "npm:^4.3.2"
     "@loaders.gl/parquet": "npm:^4.3.2"
-    "@openassistant/core": "npm:^0.0.6"
+    "@openassistant/core": "npm:^0.0.7"
     "@openassistant/ui": "npm:^0.0.7"
     "@types/classnames": "npm:^2.3.1"
     "@types/keymirror": "npm:^0.1.1"
5 changes: 3 additions & 2 deletions src/ai-assistant/package.json
@@ -36,14 +36,15 @@
     "@kepler.gl/table": "3.1.0-alpha.7",
     "@kepler.gl/types": "3.1.0-alpha.7",
     "@kepler.gl/utils": "3.1.0-alpha.7",
-    "@openassistant/core": "^0.0.6",
+    "@openassistant/core": "^0.0.7",
     "@openassistant/echarts": "^0.0.6",
     "@openassistant/geoda": "^0.0.6",
     "@openassistant/ui": "^0.0.7",
     "color-interpolate": "^1.0.5",
     "echarts": "^5.5.1",
     "global": "^4.3.0",
-    "react-intl": "^6.3.0"
+    "react-intl": "^6.3.0",
+    "usehooks-ts": "^3.1.0"
   },
   "nyc": {
     "sourceMap": false,
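The `usehooks-ts` addition above supplies the `useLocalStorage` hook that the component diff below switches to, so the AI Assistant settings persist across page reloads. A minimal standalone sketch of that pattern (the `ProviderPicker` component and its markup are invented for illustration; only the hook and the `'ai-assistant-provider'` key come from this commit):

```tsx
import React from 'react';
import {useLocalStorage} from 'usehooks-ts';

// Hypothetical settings control: useLocalStorage reads the stored value on mount
// and writes every update back to window.localStorage under the given key,
// so the choice survives a page reload.
export function ProviderPicker({defaultProvider = 'openai'}: {defaultProvider?: string}) {
  const [provider, setProvider] = useLocalStorage('ai-assistant-provider', defaultProvider);

  return (
    <select value={provider} onChange={e => setProvider(e.target.value)}>
      <option value="openai">openai</option>
      <option value="deepseek">deepseek</option>
      <option value="ollama">ollama</option>
    </select>
  );
}
```

`useLocalStorage` keeps the same `[value, setValue]` contract as `useState`, which is why the diff can swap the hooks without touching the rest of the component.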
110 changes: 62 additions & 48 deletions src/ai-assistant/src/components/ai-assistant-config.tsx
@@ -15,25 +15,8 @@ import {
 import {AiAssistantConfig} from '../index';
 import ApiKey from '../icons/api-key';
 import {testApiKey} from '@openassistant/core';
-
-const PROVIDER_MODELS = {
-  openai: ['o1-mini', 'o1-preview', 'gpt-4o', 'gpt-4o-mini', 'gpt-3.5-turbo-0125', 'gpt-3.5-turbo'],
-  google: ['gemini-2.0-flash-exp', 'gemini-1.5-flash', 'gemini-1.5-pro', 'gemini-1.0-pro'],
-  ollama: [
-    'phi4',
-    'qwen2.5-coder',
-    'qwq',
-    'llama3.3',
-    'llama3.2',
-    'llama3.1',
-    'llama3.1:70b',
-    'qwen2',
-    'llava',
-    'mistral',
-    'gemma2',
-    'phi3.5'
-  ]
-};
+import PROVIDER_MODELS from '../config/models.json';
+import {useLocalStorage} from 'usehooks-ts';

 type ThemeProps = {theme: any};

@@ -143,12 +126,27 @@ function AiAssistantConfigFactory(RangeSlider: ReturnType<typeof RangeSliderFact
   const AiAssistantConfig: React.FC<
     AiAssistantConfigProps & WrappedComponentProps & ThemeProps
   > = ({intl, aiAssistantConfig, updateAiAssistantConfig}) => {
-    const [provider, setProvider] = useState(aiAssistantConfig.provider || 'openai');
-    const [model, setModel] = useState(aiAssistantConfig.model || PROVIDER_MODELS[provider][0]);
-    const [apiKey, setApiKey] = useState(aiAssistantConfig.apiKey || '');
-    const [temperature, setTemperature] = useState(aiAssistantConfig.temperature || 0.8);
-    const [topP, setTopP] = useState(aiAssistantConfig.topP || 0.8);
-    const [baseUrl, setBaseUrl] = useState(aiAssistantConfig.baseUrl || 'http://localhost:11434');
+    const [provider, setProvider] = useLocalStorage(
+      'ai-assistant-provider',
+      aiAssistantConfig.provider || 'openai'
+    );
+    const [model, setModel] = useLocalStorage(
+      'ai-assistant-model',
+      aiAssistantConfig.model || PROVIDER_MODELS[provider][0]
+    );
+    const [apiKey, setApiKey] = useLocalStorage(
+      'ai-assistant-api-key',
+      aiAssistantConfig.apiKey || ''
+    );
+    const [temperature, setTemperature] = useLocalStorage(
+      'ai-assistant-temperature',
+      aiAssistantConfig.temperature || 0.8
+    );
+    const [topP, setTopP] = useLocalStorage('ai-assistant-top-p', aiAssistantConfig.topP || 0.8);
+    const [baseUrl, setBaseUrl] = useLocalStorage(
+      'ai-assistant-base-url',
+      aiAssistantConfig.baseUrl || 'http://localhost:11434'
+    );
     const [connectionError, setConnectionError] = useState(false);
     const [errorMessage, setErrorMessage] = useState('');
     const [isRunning, setIsRunning] = useState(false);
@@ -191,29 +189,45 @@

     const onStartChat = async () => {
       setIsRunning(true);
-      const {success, service} = await testApiKey({
-        modelProvider: provider,
-        modelName: model,
-        apiKey: apiKey,
-        baseUrl: baseUrl
-      });
-      const errorMessage = !success
-        ? service === 'ollama'
-          ? 'Connection failed: maybe invalid Ollama Base URL'
-          : 'Connection failed: maybe invalid API Key'
-        : '';
-      setConnectionError(!success);
-      setErrorMessage(errorMessage);
-      updateAiAssistantConfig({
-        provider: provider,
-        model: model,
-        apiKey: apiKey,
-        baseUrl: baseUrl,
-        isReady: success,
-        temperature: temperature,
-        topP: topP
-      });
-      setIsRunning(false);
+      try {
+        const timeoutPromise = new Promise((_, reject) => {
+          setTimeout(() => reject(new Error('Connection timeout after 15 seconds')), 15000);
+        });
+
+        const testPromise = testApiKey({
+          modelProvider: provider,
+          modelName: model,
+          apiKey: apiKey,
+          baseUrl: baseUrl
+        });
+
+        const result = (await Promise.race([testPromise, timeoutPromise])) as {
+          success: boolean;
+          service: string;
+        };
+        const {success, service} = result;
+        const errorMessage = !success
+          ? service === 'ollama'
+            ? 'Connection failed: maybe invalid Ollama Base URL'
+            : 'Connection failed: maybe invalid API Key'
+          : '';
+        setConnectionError(!success);
+        setErrorMessage(errorMessage);
+        updateAiAssistantConfig({
+          provider: provider,
+          model: model,
+          apiKey: apiKey,
+          baseUrl: baseUrl,
+          isReady: success,
+          temperature: temperature,
+          topP: topP
+        });
+      } catch (error) {
+        setConnectionError(true);
+        setErrorMessage(error instanceof Error ? error.message : 'Connection failed');
+      } finally {
+        setIsRunning(false);
+      }
     };

     return (
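The rewritten `onStartChat` above wraps `testApiKey` in a 15-second timeout via `Promise.race`, so an unreachable provider (for example a bad Ollama base URL) no longer leaves the connection test hanging. The same pattern in isolation, with a hypothetical `checkConnection` standing in for `testApiKey`; only the timeout logic mirrors the diff:

```ts
// Reject if `promise` has not settled within `ms` milliseconds. Note that the
// losing promise is not cancelled; Promise.race merely ignores its result.
function withTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
  const timeout = new Promise<never>((_, reject) =>
    setTimeout(() => reject(new Error(`Connection timeout after ${ms / 1000} seconds`)), ms)
  );
  return Promise.race([promise, timeout]);
}

// Hypothetical stand-in for testApiKey({modelProvider, modelName, apiKey, baseUrl}).
async function checkConnection(): Promise<{success: boolean; service: string}> {
  return {success: true, service: 'openai'};
}

withTimeout(checkConnection(), 15000)
  .then(({success, service}) => console.log(`connection ok (${service}):`, success))
  .catch(err => console.error(err instanceof Error ? err.message : 'Connection failed'));
```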
32 changes: 32 additions & 0 deletions src/ai-assistant/src/config/models.json
@@ -0,0 +1,32 @@
+{
+  "deepseek": ["deepseek-chat", "deepseek-reasoner"],
+  "openai": [
+    "o1-mini",
+    "o1-preview",
+    "o1",
+    "gpt-4o",
+    "gpt-4o-mini",
+    "gpt-3.5-turbo-0125",
+    "gpt-3.5-turbo"
+  ],
+  "google": ["gemini-2.0-flash-exp", "gemini-1.5-flash", "gemini-1.5-pro", "gemini-1.0-pro"],
+  "ollama": [
+    "deepseek-r1",
+    "deepseek-r1:14b",
+    "deepseek-r1:32b",
+    "deepseek-r1:70b",
+    "deepseek-r1:671b",
+    "phi4",
+    "qwen2.5-coder",
+    "qwq",
+    "llama3.3",
+    "llama3.2",
+    "llama3.1",
+    "llama3.1:70b",
+    "qwen2",
+    "llava",
+    "mistral",
+    "gemma2",
+    "phi3.5"
+  ]
+}
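The new `models.json` moves the provider-to-model map out of the component and adds DeepSeek twice: as a first-class provider (`deepseek-chat`, `deepseek-reasoner`) and as `deepseek-r1` tags served through Ollama. A short sketch of how such a map can feed the model dropdown and its default (standalone code; the import path is adjusted for the sketch and assumes `resolveJsonModule` is enabled):

```ts
import PROVIDER_MODELS from './models.json';

type Provider = keyof typeof PROVIDER_MODELS;

// Models offered for the selected provider, defaulting to the first entry,
// which mirrors the PROVIDER_MODELS[provider][0] fallback in the component diff.
function modelsFor(provider: Provider): {models: string[]; defaultModel: string} {
  const models = PROVIDER_MODELS[provider];
  return {models, defaultModel: models[0]};
}

console.log(modelsFor('deepseek'));
// { models: ['deepseek-chat', 'deepseek-reasoner'], defaultModel: 'deepseek-chat' }
```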
22 changes: 17 additions & 5 deletions yarn.lock
@@ -3196,14 +3196,15 @@ __metadata:
     "@kepler.gl/table": "npm:3.1.0-alpha.7"
     "@kepler.gl/types": "npm:3.1.0-alpha.7"
     "@kepler.gl/utils": "npm:3.1.0-alpha.7"
-    "@openassistant/core": "npm:^0.0.6"
+    "@openassistant/core": "npm:^0.0.7"
     "@openassistant/echarts": "npm:^0.0.6"
     "@openassistant/geoda": "npm:^0.0.6"
     "@openassistant/ui": "npm:^0.0.7"
     color-interpolate: "npm:^1.0.5"
     echarts: "npm:^5.5.1"
     global: "npm:^4.3.0"
     react-intl: "npm:^6.3.0"
+    usehooks-ts: "npm:^3.1.0"
   languageName: unknown
   linkType: soft

@@ -6276,9 +6277,9 @@ __metadata:
   languageName: node
   linkType: hard

-"@openassistant/core@npm:^0.0.6":
-  version: 0.0.6
-  resolution: "@openassistant/core@npm:0.0.6"
+"@openassistant/core@npm:^0.0.7":
+  version: 0.0.7
+  resolution: "@openassistant/core@npm:0.0.7"
   dependencies:
     "@langchain/core": "npm:^0.3.26"
     "@langchain/google-genai": "npm:^0.1.6"
@@ -6287,7 +6288,7 @@ __metadata:
     axios: "npm:^1.7.9"
   peerDependencies:
     react: ">=18.2"
-  checksum: 10c0/cf88b2b503f99877820822a6c5f47133ae2d5bbb8ea4b714598833b79ebb30a990e6f4fa8b8ae654a37ad36462db0ddabbf71354ed5049cb1400ea7cfa31a2e0
+  checksum: 10c0/ac9432dbda04528197a8d1d69133054f5c87dbc9706412db47cfbff7e797821e163505b1c45304cb3dba52f4c67f9864b93e2d3c0e285d953676a27ee666a83d
   languageName: node
   linkType: hard

@@ -29545,6 +29546,17 @@ __metadata:
   languageName: node
   linkType: hard

+"usehooks-ts@npm:^3.1.0":
+  version: 3.1.0
+  resolution: "usehooks-ts@npm:3.1.0"
+  dependencies:
+    lodash.debounce: "npm:^4.0.8"
+  peerDependencies:
+    react: ^16.8.0 || ^17 || ^18
+  checksum: 10c0/2204d8c95109302bdaaa51a66bf216f3dba750f1d2795c20ecba75ba1c44a070a253935d537ef536514ab6e363bcc02ccc78b5ad63576ff8d880d577cf3fc48f
+  languageName: node
+  linkType: hard
+
 "util-deprecate@npm:^1.0.1, util-deprecate@npm:^1.0.2, util-deprecate@npm:~1.0.1":
   version: 1.0.2
   resolution: "util-deprecate@npm:1.0.2"
