
ollama调用各大模型
聊天
调用deepseek
shell
# Chat request to the locally running Ollama server (/api/chat) using the
# deepseek-r1:1.5b model. "stream": false asks for the complete reply in a
# single JSON response instead of a stream of chunks.
curl --location 'http://localhost:11434/api/chat' \
--header 'Content-Type: application/json' \
--data '{
"model": "deepseek-r1:1.5b",
"messages": [
{"role": "user", "content": "你叫什么?"}
],
"stream": false
}'

调用qwen
shell
# Chat request to the local Ollama server (/api/chat) using the qwen2.5:3b
# model. "stream": false returns the full answer in one JSON response.
curl --location 'http://localhost:11434/api/chat' \
--header 'Content-Type: application/json' \
--data '{
"model": "qwen2.5:3b",
"messages": [
{"role": "user", "content": "分析一下产品A上个月的销售额,并解释原因,最后生成图表"}
],
"stream": false
}'

字符串转向量
shell
# Convert a string into an embedding vector via Ollama's /api/embeddings
# endpoint; "prompt" carries the text to embed, "model" selects qwen2.5:3b.
# NOTE(review): /api/embeddings is the legacy embeddings route — newer Ollama
# versions also expose /api/embed with an "input" field; confirm which the
# installed server version expects.
curl --location 'http://localhost:11434/api/embeddings' \
--header 'Content-Type: application/json' \
--data '{
"model": "qwen2.5:3b",
"prompt": "你好"
}'
