ollama通过模型参数选择对应模型

This commit is contained in:
junleea 2025-03-24 21:45:16 +08:00
parent b17df76df7
commit 3d9dea3ea7
1 changed file with 3 additions and 3 deletions

View File

@@ -84,7 +84,7 @@ func WSReceiveMessageService(userID, sessionID int, channel string, msg proto.WS
 	var resID uint
 	//处理消息
 	if model.Type == "ollama" {
-		resErr, resID = ReceiveOllamaSession(userID, sessionID, channel, msg)
+		resErr, resID = ReceiveOllamaSession(userID, sessionID, channel, msg, modelParam)
 	} else if model.Type == "spark" {
 		resErr, resID = ReceiveSparkSession(userID, sessionID, channel, msg, modelParam)
 	} else {
@@ -93,7 +93,7 @@ func WSReceiveMessageService(userID, sessionID int, channel string, msg proto.WS
 	return resErr, resID
 }
-func ReceiveOllamaSession(userID, sessionID int, channel string, msg proto.WSMessageReq) (error, uint) {
+func ReceiveOllamaSession(userID, sessionID int, channel string, msg proto.WSMessageReq, modelParam proto.ModelParam) (error, uint) {
 	var resErr error
 	var resID uint
 	var err error
@@ -109,7 +109,7 @@ func ReceiveOllamaSession(userID, sessionID int, channel string, msg proto.WSMes
 		log.Println("actx has get will use:", len(actx))
 	}
 	req := &api.GenerateRequest{
-		Model:   "qwen2.5:0.5b",
+		Model:   modelParam.Model,
 		Context: actx,
 		Prompt:  prompt,
 	}