<h4 align="right"><strong>English</strong> | <a href="./readme_zh.md">简体中文</a></h4>
<p align="center">
<img src="./docs/assets/images/banner.png"/>
</p>

# Agents-Flex is an LLM Application Framework like LangChain, based on Java.

---
## Features
- LLM Access
- Prompt, Prompt Template
- Function Calling: Definer, Invoker, Runner
- Memory
- Embedding
- Vector Store
- Resource Loaders
- Document
  - Splitter
  - Loader
  - Parser
    - PoiParser
    - PdfBoxParser
- Agent
  - LLM Agent
- Chain (see the sketch after this list)
  - SequentialChain
  - ParallelChain
  - LoopChain
  - ChainNode
    - AgentNode
    - RouterNode
      - ELRouterNode
      - LLMRouterNode
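
The Chain entries above are the workflow layer: a `SequentialChain` runs its `ChainNode`s one after another, an `AgentNode` wraps an agent, and the router nodes pick which branch runs next. The sketch below only illustrates that shape; apart from the class names taken from the list, the `addNode(...)` and `executeForResult(...)` methods, the parameter map, and the two agents are illustrative assumptions, not the documented API.

```java
// Sketch only: addNode(...), executeForResult(...) and the two agents are
// assumptions made for illustration; consult the actual Chain API before use.
SequentialChain chain = new SequentialChain();
chain.addNode(new AgentNode(summarizeAgent));   // hypothetical LLM Agent that summarizes the input
chain.addNode(new AgentNode(translateAgent));   // hypothetical LLM Agent that translates the summary
Object result = chain.executeForResult(Map.of("text", "Agents-Flex lets you build LLM applications in Java."));
System.out.println(result);
```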
## Simple Chat
Use the OpenAI LLM:

```java
@Test
public void testChat() {
    OpenAiLlmConfig config = new OpenAiLlmConfig();
    config.setApiKey("sk-rts5NF6n*******");

    Llm llm = new OpenAiLlm(config);
    String response = llm.chat("what is your name?");

    System.out.println(response);
}
```
Use the Qwen LLM:

```java
@Test
public void testChat() {
    QwenLlmConfig config = new QwenLlmConfig();
    config.setApiKey("sk-28a6be3236****");
    config.setModel("qwen-turbo");

    Llm llm = new QwenLlm(config);
    String response = llm.chat("what is your name?");

    System.out.println(response);
}
```
Use the SparkAi LLM:

```java
@Test
public void testChat() {
    SparkLlmConfig config = new SparkLlmConfig();
    config.setAppId("****");
    config.setApiKey("****");
    config.setApiSecret("****");

    Llm llm = new SparkLlm(config);
    String response = llm.chat("what is your name?");

    System.out.println(response);
}
```
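
All three providers above are used through the same `Llm` interface, so the streaming API shown in the next section works for any of them. Here is a minimal sketch that reuses the `chatStream` callback, `HistoriesPrompt`, and `HumanMessage` from the example below; the `Thread.sleep` at the end is an assumption that `chatStream` returns immediately and delivers chunks asynchronously.

```java
@Test
public void testChatStream() throws InterruptedException {
    SparkLlmConfig config = new SparkLlmConfig();
    config.setAppId("****");
    config.setApiKey("****");
    config.setApiSecret("****");

    Llm llm = new SparkLlm(config);

    // wrap the question in a prompt so it can be streamed
    HistoriesPrompt prompt = new HistoriesPrompt();
    prompt.addMessage(new HumanMessage("what is your name?"));

    // each partial answer arrives through the callback
    llm.chatStream(prompt, (context, response) -> {
        System.out.println(">>>> " + response.getMessage().getContent());
    });

    // assumption: the stream is asynchronous, so keep the test alive while chunks arrive
    Thread.sleep(10_000);
}
```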
## Chat With Histories
```java
public static void main(String[] args) {
    SparkLlmConfig config = new SparkLlmConfig();
    config.setAppId("****");
    config.setApiKey("****");
    config.setApiSecret("****");

    Llm llm = new SparkLlm(config);

    // HistoriesPrompt keeps the previous messages, so every call sees the whole conversation
    HistoriesPrompt prompt = new HistoriesPrompt();

    System.out.println("ask for something...");
    Scanner scanner = new Scanner(System.in);
    String userInput = scanner.nextLine();

    while (userInput != null) {

        prompt.addMessage(new HumanMessage(userInput));

        // stream the answer chunk by chunk
        llm.chatStream(prompt, (context, response) -> {
            System.out.println(">>>> " + response.getMessage().getContent());
        });

        userInput = scanner.nextLine();
    }
}
```
## Function Calling
- Step 1: define the native function

```java
public class WeatherUtil {

    @FunctionDef(name = "get_the_weather_info", description = "get the weather info")
    public static String getWeatherInfo(
        @FunctionParam(name = "city", description = "the city name") String name
    ) {
        // a real implementation would call a third-party weather API here
        return "Today it will be dull and overcast in " + name;
    }
}
```
- Step 2: invoke the function through the LLM

```java
public static void main(String[] args) {
    OpenAiLlmConfig config = new OpenAiLlmConfig();
    config.setApiKey("sk-rts5NF6n*******");

    OpenAiLlm llm = new OpenAiLlm(config);

    // the LLM picks the matching function in WeatherUtil and fills in its arguments
    FunctionPrompt prompt = new FunctionPrompt("How is the weather in Beijing today?", WeatherUtil.class);
    FunctionResultResponse response = llm.chat(prompt);

    // invoke() runs the local method with the arguments returned by the LLM
    Object result = response.invoke();

    System.out.println(result);
    // Today it will be dull and overcast in Beijing
}
```
## Communication
![](./docs/assets/images/wechat-group.png)
## Modules
![](./docs/assets/images/modules.jpg)