|
@@ -49,6 +49,30 @@ ollama run vicuna
|
|
|
|
|
|
```
|
|
|
|
|
|
+
|
|
|
+相关GUI工具:
|
|
|
+
|
|
|
+
|
|
|
+chatbox
|
|
|
+
|
|
|
+chatbot-ollama
|
|
|
+```
|
|
|
+export DEFAULT_MODEL="deepseek-r1:1.5b" && export OLLAMA_HOST="http://localhost:8434" && cd /opt/chatbot-ollama && npm run dev # web 应用的启动命令
|
|
|
+
|
|
|
+```
|
|
|
+
|
|
|
+open-webui
|
|
|
+
|
|
|
+```
|
|
|
+export ENABLE_OPENAI_API=false && export OLLAMA_API_BASE_URL="http://localhost:8434" && open-webui serve # open-webui 的启动命令
|
|
|
+```
|
|
|
+
|
|
|
+anythingllm-server
|
|
|
+```
|
|
|
+cd /opt/anythingllm-template && yarn setup && export OLLAMA_HOST="http://localhost:8434" && env LLM_PROVIDER='ollama' OLLAMA_BASE_PATH='http://127.0.0.1:8434' OLLAMA_MODEL_PREF='deepseek-r1:1.5b' OLLAMA_MODEL_TOKEN_LIMIT=4096 EMBEDDING_ENGINE='ollama' EMBEDDING_BASE_PATH='http://127.0.0.1:8434' EMBEDDING_MODEL_PREF='bge-m3:latest' yarn dev:server # anythingllm server 的启动命令
|
|
|
+```
|
|
|
+
|
|
|
+
|
|
|
## Develop
|
|
|
|
|
|
安装基础依赖 git build-essential cmake go1.21
|