README.md (+16 lines: 16 additions & 0 deletions)
@@ -28,6 +28,22 @@
4. Build the server (`npm run build`)
5. Run (`npm start`)

+#### Ollama Configuration Guide
+
+- Recommended if you can run an LLM larger than 14b parameters.
+- You do not need to provide an API key.
+- Set LLM_PROVIDER to ollama (it connects to the default Ollama endpoint).
+- Set LLM_MODELNAME to a model name listed by the `ollama ls` command.
+- It is recommended to set TOKEN_PROCESSING_CHARACTER_LIMIT between 10000 and 20000 (roughly 300-600 lines of code) if you are using a low-parameter LLM (e.g. 8b or 14b); a sample `.env` is sketched after this diff.
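A minimal `.env` sketch for the settings above, assuming the project reads them via dotenv as the factory code below does; the model name and character limit are illustrative values, not project defaults:

```env
# Local Ollama endpoint is used by default, so no LLM_APIKEY is needed
LLM_PROVIDER=ollama
# Any model shown by `ollama ls` (llama3.3 here is only an example)
LLM_MODELNAME=llama3.3
# Smaller chunks (~300-600 lines of code) suit 8b/14b models (illustrative value)
TOKEN_PROCESSING_CHARACTER_LIMIT=15000
```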
src/service/llm-factory.ts (+11 lines: 11 additions & 0 deletions)
@@ -3,6 +3,7 @@ import DeepSeekProvider from '@/llm/provider/deepseek'
import GoogleProvider from '@/llm/provider/google'
import LLMConfig from '@/llm/llm-config'
import dotenv from 'dotenv'
+import { OllamaProvider } from '@/llm/provider/ollama'
dotenv.config()

/**
@@ -19,11 +20,21 @@ export default class LLMFactory {
    const apiKey = process.env.LLM_APIKEY
    const modelName = process.env.LLM_MODELNAME

+    if (!provider) {
+      throw new Error('LLM Provider is not specified. Please set LLM_PROVIDER in the environment\nExample: LLM_PROVIDER=google, LLM_PROVIDER=deepseek, LLM_PROVIDER=ollama')
+    }
+
+    if (!modelName) {
+      throw new Error('LLM Model name is not specified. Example: LLM_MODELNAME=llama3.3 for llama3.3')
+    }
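The diff is cut off here, so the provider-selection branch that actually uses the new `OllamaProvider` import is not visible. Below is a hedged sketch of how such a branch could look; the `createProvider` helper and the constructor signatures are assumptions for illustration, not the PR's actual code:

```typescript
// Illustrative sketch only: the helper name and constructor signatures below
// are assumed, since the real selection logic is truncated in this diff.
import DeepSeekProvider from '@/llm/provider/deepseek'
import GoogleProvider from '@/llm/provider/google'
import { OllamaProvider } from '@/llm/provider/ollama'

function createProvider(provider: string, modelName: string, apiKey?: string) {
  switch (provider.toLowerCase()) {
    case 'ollama':
      // Ollama runs locally against its default endpoint, so no API key is passed
      return new OllamaProvider(modelName)
    case 'google':
      return new GoogleProvider(modelName, apiKey)
    case 'deepseek':
      return new DeepSeekProvider(modelName, apiKey)
    default:
      throw new Error(`Unsupported LLM_PROVIDER: ${provider}`)
  }
}
```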