feat: add gitee ai models

Michael Yang 2024-09-08 12:37:05 +08:00
parent 1e01bedf9e
commit 36d18c1122
8 changed files with 356 additions and 18 deletions

View File

@@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.agentsflex</groupId>
<artifactId>agents-flex-llm</artifactId>
<version>1.0.0-beta.9</version>
</parent>
<artifactId>agents-flex-llm-gitee</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>com.agentsflex</groupId>
<artifactId>agents-flex-core</artifactId>
<version>1.0.0-beta.9</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,83 @@
/*
* Copyright (c) 2023-2025, Agents-Flex (fuhai999@gmail.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agentsflex.llm.gitee;
import com.agentsflex.core.llm.ChatOptions;
import com.agentsflex.core.message.MessageStatus;
import com.agentsflex.core.parser.AiMessageParser;
import com.agentsflex.core.parser.FunctionMessageParser;
import com.agentsflex.core.parser.impl.DefaultAiMessageParser;
import com.agentsflex.core.parser.impl.DefaultFunctionMessageParser;
import com.agentsflex.core.prompt.DefaultPromptFormat;
import com.agentsflex.core.prompt.Prompt;
import com.agentsflex.core.prompt.PromptFormat;
import com.agentsflex.core.util.Maps;
import com.alibaba.fastjson.JSON;
import java.util.Map;
public class GiteeAiLLmUtil {
private static final PromptFormat promptFormat = new DefaultPromptFormat();
public static AiMessageParser getAiMessageParser(boolean isStream) {
DefaultAiMessageParser aiMessageParser = new DefaultAiMessageParser();
if (isStream) {
aiMessageParser.setContentPath("$.choices[0].delta.content");
} else {
aiMessageParser.setContentPath("$.choices[0].message.content");
}
aiMessageParser.setIndexPath("$.choices[0].index");
aiMessageParser.setStatusPath("$.choices[0].finish_reason");
aiMessageParser.setStatusParser(content -> parseMessageStatus((String) content));
aiMessageParser.setTotalTokensPath("$.usage.total_tokens");
aiMessageParser.setPromptTokensPath("$.usage.prompt_tokens");
aiMessageParser.setCompletionTokensPath("$.usage.completion_tokens");
return aiMessageParser;
}
public static FunctionMessageParser getFunctionMessageParser() {
DefaultFunctionMessageParser functionMessageParser = new DefaultFunctionMessageParser();
functionMessageParser.setFunctionNamePath("$.choices[0].message.tool_calls[0].function.name");
functionMessageParser.setFunctionArgsPath("$.choices[0].message.tool_calls[0].function.arguments");
functionMessageParser.setFunctionArgsParser(JSON::parseObject);
return functionMessageParser;
}
public static MessageStatus parseMessageStatus(String status) {
return "stop".equals(status) ? MessageStatus.END : MessageStatus.MIDDLE;
}
public static String promptToPayload(Prompt<?> prompt, GiteeAiLlmConfig config, ChatOptions options, boolean withStream) {
Map<String, Object> build = Maps.of("messages", promptFormat.toMessagesJsonObject(prompt))
.putIf(withStream, "stream", withStream)
.putIfNotNull("max_tokens", options.getMaxTokens())
.putIfNotNull("temperature", options.getTemperature())
.putIfNotNull("top_p", options.getTopP())
.putIfNotNull("top_k", options.getTopK())
.putIfNotEmpty("stop", options.getStop())
.build();
return JSON.toJSONString(build);
}
}

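The JSONPath expressions above assume an OpenAI-style completion body. A minimal sketch of exercising the non-streaming parser against a hand-written sample (the sample JSON and the class name are illustrative assumptions, not part of this commit):

package com.agentsflex.llm.gitee;

import com.agentsflex.core.parser.AiMessageParser;
import com.alibaba.fastjson.JSON;

public class GiteeAiParserSketch {
    public static void main(String[] args) {
        // Hypothetical response body shaped to match the parser paths in GiteeAiLLmUtil.
        String sample = "{\"choices\":[{\"index\":0,"
            + "\"message\":{\"content\":\"Hello!\"},\"finish_reason\":\"stop\"}],"
            + "\"usage\":{\"prompt_tokens\":3,\"completion_tokens\":2,\"total_tokens\":5}}";

        AiMessageParser parser = GiteeAiLLmUtil.getAiMessageParser(false);
        // parse() takes a fastjson JSONObject, exactly as GiteeAiLlm feeds it the HTTP response.
        System.out.println(parser.parse(JSON.parseObject(sample)));
        // "stop" maps to MessageStatus.END; any other finish_reason maps to MIDDLE.
        System.out.println(GiteeAiLLmUtil.parseMessageStatus("stop"));
    }
}
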
View File

@@ -0,0 +1,144 @@
/*
* Copyright (c) 2023-2025, Agents-Flex (fuhai999@gmail.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agentsflex.llm.gitee;
import com.agentsflex.core.document.Document;
import com.agentsflex.core.llm.BaseLlm;
import com.agentsflex.core.llm.ChatOptions;
import com.agentsflex.core.llm.MessageResponse;
import com.agentsflex.core.llm.StreamResponseListener;
import com.agentsflex.core.llm.client.BaseLlmClientListener;
import com.agentsflex.core.llm.client.HttpClient;
import com.agentsflex.core.llm.client.LlmClient;
import com.agentsflex.core.llm.client.LlmClientListener;
import com.agentsflex.core.llm.client.impl.SseClient;
import com.agentsflex.core.llm.embedding.EmbeddingOptions;
import com.agentsflex.core.llm.response.AbstractBaseMessageResponse;
import com.agentsflex.core.llm.response.AiMessageResponse;
import com.agentsflex.core.llm.response.FunctionMessageResponse;
import com.agentsflex.core.parser.AiMessageParser;
import com.agentsflex.core.parser.FunctionMessageParser;
import com.agentsflex.core.prompt.FunctionPrompt;
import com.agentsflex.core.prompt.Prompt;
import com.agentsflex.core.store.VectorData;
import com.agentsflex.core.util.Maps;
import com.agentsflex.core.util.StringUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
public class GiteeAiLlm extends BaseLlm<GiteeAiLlmConfig> {
private final HttpClient httpClient = new HttpClient();
public AiMessageParser aiMessageParser = GiteeAiLLmUtil.getAiMessageParser(false);
public AiMessageParser streamMessageParser = GiteeAiLLmUtil.getAiMessageParser(true);
public FunctionMessageParser functionMessageParser = GiteeAiLLmUtil.getFunctionMessageParser();
public GiteeAiLlm(GiteeAiLlmConfig config) {
super(config);
}
@Override
public <R extends MessageResponse<?>> R chat(Prompt<R> prompt, ChatOptions options) {
Map<String, String> headers = new HashMap<>();
headers.put("Content-Type", "application/json");
headers.put("Authorization", "Bearer " + getConfig().getApiKey());
Consumer<Map<String, String>> headersConfig = config.getHeadersConfig();
if (headersConfig != null) {
headersConfig.accept(headers);
}
String payload = GiteeAiLLmUtil.promptToPayload(prompt, config, options, false);
String endpoint = config.getEndpoint();
String response = httpClient.post(endpoint + "/api/serverless/" + config.getModel() + "/chat/completions", headers, payload);
if (StringUtil.noText(response)) {
return null;
}
if (config.isDebug()) {
System.out.println(">>>>receive payload:" + response);
}
JSONObject jsonObject = JSON.parseObject(response);
JSONObject error = jsonObject.getJSONObject("error");
AbstractBaseMessageResponse<?> messageResponse;
if (prompt instanceof FunctionPrompt) {
messageResponse = new FunctionMessageResponse(((FunctionPrompt) prompt).getFunctions()
, functionMessageParser.parse(jsonObject));
} else {
messageResponse = new AiMessageResponse(aiMessageParser.parse(jsonObject));
}
if (error != null && !error.isEmpty()) {
messageResponse.setError(true);
messageResponse.setErrorMessage(error.getString("message"));
messageResponse.setErrorType(error.getString("type"));
messageResponse.setErrorCode(error.getString("code"));
}
//noinspection unchecked
return (R) messageResponse;
}
@Override
public <R extends MessageResponse<?>> void chatStream(Prompt<R> prompt, StreamResponseListener<R> listener, ChatOptions options) {
LlmClient llmClient = new SseClient();
Map<String, String> headers = new HashMap<>();
headers.put("Content-Type", "application/json");
headers.put("Authorization", "Bearer " + getConfig().getApiKey());
String payload = GiteeAiLLmUtil.promptToPayload(prompt, config, options, true);
String endpoint = config.getEndpoint();
LlmClientListener clientListener = new BaseLlmClientListener(this, llmClient, listener, prompt, streamMessageParser, functionMessageParser);
llmClient.start(endpoint + "/api/serverless/" + config.getModel() + "/chat/completions", headers, payload, clientListener, config);
}
@Override
public VectorData embed(Document document, EmbeddingOptions options) {
Map<String, String> headers = new HashMap<>();
headers.put("Content-Type", "application/json");
headers.put("Authorization", "Bearer " + getConfig().getApiKey());
String payload = Maps.of("inputs", document.getContent()).toJSON();
String endpoint = config.getEndpoint();
String embeddingModel = options.getModelOrDefault(config.getDefaultEmbeddingModal());
String response = httpClient.post(endpoint + "/api/serverless/" + embeddingModel + "/embeddings", headers, payload);
if (StringUtil.noText(response)) {
return null;
}
if (config.isDebug()) {
System.out.println(">>>>receive payload:" + response);
}
VectorData vectorData = new VectorData();
double[] embedding = JSONObject.parseObject(response, double[].class);
vectorData.setVector(embedding);
return vectorData;
}
}

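Both the chat and embedding calls above address Gitee AI's serverless API, with the model name embedded in the request path. A small sketch of the URLs a default config yields (the class name and API key are placeholders):

package com.agentsflex.llm.gitee;

public class GiteeAiUrlSketch {
    public static void main(String[] args) {
        GiteeAiLlmConfig config = new GiteeAiLlmConfig(); // endpoint defaults to https://ai.gitee.com
        config.setApiKey("your-api-key");                  // placeholder, not a real key

        // Chat completions URL, as assembled in GiteeAiLlm.chat() and chatStream():
        System.out.println(config.getEndpoint() + "/api/serverless/" + config.getModel() + "/chat/completions");
        // Embeddings URL, as assembled in GiteeAiLlm.embed() with the default embedding model:
        System.out.println(config.getEndpoint() + "/api/serverless/" + config.getDefaultEmbeddingModal() + "/embeddings");
    }
}
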
View File

@@ -0,0 +1,41 @@
/*
* Copyright (c) 2023-2025, Agents-Flex (fuhai999@gmail.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agentsflex.llm.gitee;
import com.agentsflex.core.llm.LlmConfig;
public class GiteeAiLlmConfig extends LlmConfig {
private static final String DEFAULT_MODEL = "Qwen2-7B-Instruct";
private static final String DEFAULT_EMBEDDING_MODEL = "bge-large-zh-v1.5";
private static final String DEFAULT_ENDPOINT = "https://ai.gitee.com";
private String defaultEmbeddingModal = DEFAULT_EMBEDDING_MODEL;
public String getDefaultEmbeddingModal() {
return defaultEmbeddingModal;
}
public void setDefaultEmbeddingModal(String defaultEmbeddingModal) {
this.defaultEmbeddingModal = defaultEmbeddingModal;
}
public GiteeAiLlmConfig() {
setEndpoint(DEFAULT_ENDPOINT);
setModel(DEFAULT_MODEL);
}
}

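The defaults above can be overridden through the setters inherited from LlmConfig plus setDefaultEmbeddingModal. A hedged sketch, with the class name and API key as placeholders and the model names taken from the supported-model tables later in this commit:

package com.agentsflex.llm.gitee;

public class GiteeAiConfigSketch {
    public static void main(String[] args) {
        GiteeAiLlmConfig config = new GiteeAiLlmConfig();
        config.setApiKey("your-api-key");          // placeholder
        config.setModel("Qwen2-72B-Instruct");     // chat model listed in the README table
        config.setDefaultEmbeddingModal("bge-m3"); // embedding model listed in the README table
        // setEndpoint(...) is only needed when not using the default https://ai.gitee.com

        GiteeAiLlm llm = new GiteeAiLlm(config);
        System.out.println(llm.chat("Hello"));     // same one-shot chat(String) call as GiteeAITest
    }
}
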
View File

@@ -0,0 +1,13 @@
package com.agentsflex.llm.gitee;
public class GiteeAITest {
public static void main(String[] args) {
GiteeAiLlmConfig config = new GiteeAiLlmConfig();
config.setApiKey("your-api-key");
GiteeAiLlm llm = new GiteeAiLlm(config);
String result = llm.chat("你好");
System.out.println(result);
}
}

View File

@@ -0,0 +1,16 @@
package com.agentsflex.llm.gitee;
import com.agentsflex.core.document.Document;
import com.agentsflex.core.store.VectorData;
public class GiteeAiEbeddingTest {
public static void main(String[] args) {
GiteeAiLlmConfig config = new GiteeAiLlmConfig();
config.setApiKey("your-api-key");
GiteeAiLlm llm = new GiteeAiLlm(config);
VectorData result = llm.embed(Document.of("你好"));
System.out.println(result);
}
}

View File

@@ -19,6 +19,7 @@
<module>agents-flex-llm-ollama</module>
<module>agents-flex-llm-moonshot</module>
<module>agents-flex-llm-coze</module>
<module>agents-flex-llm-gitee</module>
</modules>
<properties>

View File

@@ -171,7 +171,7 @@ public class WeatherUtil {
| LLM Name | Support Status | Description |
|------------------------------|--------|-------|
| ChatGPT | ✅ Supported | - |
| Models deployed via Ollama | ✅ Supported | - |
| 星火大模型 (iFlytek Spark) | ✅ Supported | - |
@@ -179,6 +179,11 @@ public class WeatherUtil {
| 文心一言 (ERNIE Bot) | Planned... | PRs welcome |
| 智普 ChatGLM | ✅ Supported | - |
| 月之暗面 Moonshot | ✅ Supported | - |
| 扣子 Coze | ✅ Supported | - |
| GiteeAI - Qwen2-7B-Instruct | ✅ Supported | - |
| GiteeAI - Qwen2-72B-Instruct | ✅ Supported | - |
| GiteeAI - Yi-1.5-34B-Chat | ✅ Supported | - |
| GiteeAI - glm-4-9b-chat | ✅ Supported | - |
### Image Generation Models
@@ -194,14 +199,16 @@ public class WeatherUtil {
### Embedding Models
| Model Name | Support Status | Description |
|-----------------------------|--------|-------|
| OpenAI | ✅ Supported | - |
| 星火大模型 (iFlytek Spark) | ✅ Supported | - |
| 智普 ChatGLM | ✅ Supported | - |
| Ollama | ✅ Supported | - |
| 通义千问 (Qwen) | ✅ Supported | - |
| GiteeAI - bge-small-zh-v1.5 | ✅ Supported | - |
| GiteeAI - bge-large-zh-v1.5 | ✅ Supported | - |
| GiteeAI - bge-m3 | ✅ Supported | - |
| More | Planned... | PRs welcome |