feat: add model field for chat_item and chat_history data tables

RockYang 2024-01-26 16:54:00 +08:00
parent 1bcd0f4c1a
commit 023a2c2f09
11 changed files with 33 additions and 2 deletions
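
Every chat handler touched below makes the same three-line change: the model name carried on the incoming request (req.Model) is copied onto the user prompt record, the assistant reply record, and the chat item itself before each is persisted with GORM. The following is a minimal, self-contained sketch of that pattern; HistoryMessage and ChatItem here are simplified stand-ins for the real store models, and tagWithModel is a hypothetical helper used only for illustration, not code from this commit.

package main

import "fmt"

// Simplified stand-ins for the store models extended in this commit;
// only the fields needed for the illustration are included.
type HistoryMessage struct {
	ChatId string
	Model  string // new field: name of the AI model that handled the message
}

type ChatItem struct {
	ChatId string
	Model  string // new field: name of the AI model used by the conversation
}

// tagWithModel condenses the assignment each handler now performs: the
// request's model name is written to both history records and the chat item.
func tagWithModel(model string, userMsg, replyMsg *HistoryMessage, item *ChatItem) {
	userMsg.Model = model
	replyMsg.Model = model
	item.Model = model
}

func main() {
	userMsg := HistoryMessage{ChatId: "chat-1"}
	replyMsg := HistoryMessage{ChatId: "chat-1"}
	item := ChatItem{ChatId: "chat-1"}
	tagWithModel("gpt-3.5-turbo", &userMsg, &replyMsg, &item)
	fmt.Println(userMsg.Model, replyMsg.Model, item.Model)
}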

@@ -135,6 +135,7 @@ func (h *ChatHandler) sendAzureMessage(
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
@@ -156,6 +157,7 @@ func (h *ChatHandler) sendAzureMessage(
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
@@ -181,6 +183,7 @@ func (h *ChatHandler) sendAzureMessage(
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.db.Create(&chatItem)
}
}

@@ -160,6 +160,7 @@ func (h *ChatHandler) sendBaiduMessage(
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
@@ -181,6 +182,7 @@ func (h *ChatHandler) sendBaiduMessage(
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
@@ -205,6 +207,7 @@ func (h *ChatHandler) sendBaiduMessage(
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.db.Create(&chatItem)
}
}

@@ -139,6 +139,7 @@ func (h *ChatHandler) sendChatGLMMessage(
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
@@ -160,6 +161,7 @@ func (h *ChatHandler) sendChatGLMMessage(
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
@@ -184,6 +186,7 @@ func (h *ChatHandler) sendChatGLMMessage(
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.db.Create(&chatItem)
}
}

@@ -206,6 +206,7 @@ func (h *ChatHandler) sendOpenAiMessage(
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: useContext,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
@@ -235,6 +236,7 @@ func (h *ChatHandler) sendOpenAiMessage(
Content: message.Content,
Tokens: totalTokens,
UseContext: useContext,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
@@ -260,6 +262,7 @@ func (h *ChatHandler) sendOpenAiMessage(
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.db.Create(&chatItem)
}
}

@@ -160,6 +160,7 @@ func (h *ChatHandler) sendQWenMessage(
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
@@ -181,6 +182,7 @@ func (h *ChatHandler) sendQWenMessage(
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
@@ -205,6 +207,7 @@ func (h *ChatHandler) sendQWenMessage(
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.db.Create(&chatItem)
}
}

@@ -198,6 +198,7 @@ func (h *ChatHandler) sendXunFeiMessage(
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
@@ -219,6 +220,7 @@ func (h *ChatHandler) sendXunFeiMessage(
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
@@ -243,6 +245,7 @@ func (h *ChatHandler) sendXunFeiMessage(
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.db.Create(&chatItem)
}
}

@@ -7,6 +7,7 @@ type HistoryMessage struct {
ChatId string // conversation ID
UserId uint // user ID
RoleId uint // role ID
Model string // AI model
Type string
Icon string
Tokens int

@@ -7,7 +7,8 @@ type ChatItem struct {
ChatId string `gorm:"column:chat_id;unique"` // conversation ID
UserId uint // user ID
RoleId uint // role ID
ModelId uint // conversation model
ModelId uint // model ID
Model string // model name
Title string // conversation title
DeletedAt gorm.DeletedAt
}

@@ -5,6 +5,7 @@ type HistoryMessage struct {
ChatId string `json:"chat_id"`
UserId uint `json:"user_id"`
RoleId uint `json:"role_id"`
Model string `json:"model"`
Type string `json:"type"`
Icon string `json:"icon"`
Tokens int `json:"tokens"`

@@ -7,5 +7,6 @@ type ChatItem struct {
RoleId uint `json:"role_id"`
ChatId string `json:"chat_id"`
ModelId uint `json:"model_id"`
Model string `json:"model"`
Title string `json:"title"`
}

@@ -1,4 +1,13 @@
ALTER TABLE `chatgpt_mj_jobs` ADD `err_msg` VARCHAR(255) DEFAULT NULL COMMENT '错误信息' AFTER `publish`;
ALTER TABLE `chatgpt_sd_jobs` ADD `err_msg` VARCHAR(255) DEFAULT NULL COMMENT '错误信息' AFTER `publish`;
ALTER TABLE `chatgpt_chat_items` ADD `model` VARCHAR(30) NULL COMMENT '模型名称' AFTER `model_id`;
ALTER TABLE `chatgpt_chat_history` ADD `model` VARCHAR(30) NULL COMMENT '模型名称' AFTER `role_id`;
-- Initialize conversation (chat item) data
UPDATE chatgpt_chat_items s SET model=(SELECT value FROM chatgpt_chat_models WHERE id = s.model_id);
-- Initialize chat history data
UPDATE chatgpt_chat_history s SET model=(SELECT model FROM chatgpt_chat_items WHERE chat_id = s.chat_id);
-- Clean up chat history whose conversation has been deleted (optional)
-- DELETE FROM `chatgpt_chat_history` WHERE model is NULL;
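
Note on the backfill order: the UPDATE on chatgpt_chat_history copies the model name from chatgpt_chat_items via chat_id, so it has to run after the chat-item backfill; history rows whose conversation no longer exists keep a NULL model, which is what the optional DELETE at the end is meant to clean up.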