Spring Boot Integration with LangChain4j

The following steps target Alibaba Cloud's DashScope (灵积) platform.

Getting started

Import the dependencies

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>dev.langchain4j</groupId>
                <artifactId>langchain4j-community-bom</artifactId>
                <version>1.0.0-beta1</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <dependencies>
        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-community-dashscope-spring-boot-starter</artifactId>
        </dependency>
    </dependencies>

Configuration file

langchain4j:
  community:
    dashscope:
      chat-model:
        api-key: your-api-key
        model-name: your-model-name

controller

@RestController
@RequestMapping("/ai")
public class ChatController {
    @Autowired
    private QwenChatModel qwenChatModel; // Qwen chat model, auto-configured by the starter

    @RequestMapping("/chat")
    public String chat(@RequestParam(defaultValue = "Who are you?") String message){
        return qwenChatModel.chat(message);
    }
}

Streaming output

Import the dependency

Streaming output requires Flux from Spring WebFlux.

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-webflux</artifactId>
        </dependency>

Add the configuration

langchain4j:
  community:
    dashscope:
      streaming-chat-model:
        api-key: your-api-key
        model-name: your-model-name

controller

@RestController
@RequestMapping("/ai")
public class ChatController {

    @Autowired
    private QwenStreamingChatModel qwenStreamingChatModel;

    @RequestMapping(value = "/stream",produces = "text/stream;charset=UTF-8")//解决乱码问题
    public Flux<String> stream(@RequestParam(defaultValue = "Who are you?") String message){
        Flux<String> flux = Flux.create(fluxSink -> {
            qwenStreamingChatModel.chat(message, new StreamingChatResponseHandler() {
                @Override
                public void onPartialResponse(String s) {
                    System.out.println("响应文本:" + s);
                    fluxSink.next(s);
                }

                @Override
                public void onCompleteResponse(ChatResponse chatResponse) {
                    System.out.println("结束");
                    fluxSink.complete();
                }

                @Override
                public void onError(Throwable throwable) {
                    System.out.println("错误");
                    fluxSink.error(throwable);
                }
            });
        });
        return flux;
    }
}

Conversational memory

How it works

        ChatLanguageModel model = QwenChatModel
                .builder()
                .apiKey("xxxx")
                .modelName("xxxx")
                .build();
        UserMessage message1 = UserMessage.userMessage("Hi, I'm Zhang San");
        ChatResponse resp1 = model.chat(message1);
        AiMessage aiMessage1 = resp1.aiMessage();
        System.out.println(aiMessage1.text());
        System.out.println("------");
        ChatResponse resp2 = model.chat(message1, aiMessage1, UserMessage.userMessage("What is my name?"));
        System.out.println(resp2.aiMessage().text());

Multi-turn conversation works by resending the previous messages and responses to the model together with each new message.

Written by hand this quickly becomes tedious: the more turns there are, the more parameters have to be passed on every call.

So LangChain4j wraps this up for us (using a dynamic proxy).

Import the dependency

        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j</artifactId>
            <version>1.0.0-beta1</version>
        </dependency>

Create the interface

public interface Assistant {
    // blocking response
    String chat(String message);
    // streaming response
    TokenStream stream(String message);
}
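
Outside of Spring, the simplest way to obtain such a proxy is AiServices.create. A minimal sketch, assuming a ChatLanguageModel named model built as in the earlier example (calling stream() would additionally require a streaming model):

        // create a proxy backed only by the blocking chat model (no memory, no streaming)
        Assistant assistant = AiServices.create(Assistant.class, model);
        System.out.println(assistant.chat("Hi, I'm Zhang San"));

The Spring configuration below goes further and also wires in streaming and conversational memory.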

Add the configuration

@Configuration
public class AiConfig {

    @Bean
    public Assistant assistant(ChatLanguageModel chatLanguageModel,
                               StreamingChatLanguageModel streamingChatLanguageModel) {
        // keep at most 10 messages; by default they are stored in memory
        MessageWindowChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
        // create the dynamic proxy object for Assistant
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(chatLanguageModel)
                .streamingChatLanguageModel(streamingChatLanguageModel)
                .chatMemory(chatMemory) // attach the chat memory
                .build();
        return assistant;
    }
}

controller

@RestController
@RequestMapping("/ai")
public class ChatController {

    @Autowired
    private Assistant assistant;

    @RequestMapping("assistant/chat")
    public String assistantChat(@RequestParam String message){
        return assistant.chat(message);
    }

    @RequestMapping(value = "assistant/stream",produces = "text/stream;charset=UTF-8")
    public Flux<String> assistantStream(@RequestParam String message){
        TokenStream stream = assistant.stream(message);
        return Flux.create(sink -> {
            stream.onPartialResponse(sink::next)
                    .onCompleteResponse(c->sink.complete())
                    .onError(sink::error)
                    .start();
        });
    }
}

Conversation isolation

Different conversations are kept separate via a memoryId.

Create the interface

public interface AssistantUnique {
    // blocking response
    String chat(@MemoryId int memoryId, @UserMessage String message);
    // streaming response
    TokenStream stream(@MemoryId int memoryId, @UserMessage String message);
}

Add the configuration

    @Bean
    public AssistantUnique assistantUnique(ChatLanguageModel chatLanguageModel,
                               StreamingChatLanguageModel streamingChatLanguageModel) {
        // create the proxy; each memoryId gets its own MessageWindowChatMemory
        AssistantUnique assistant = AiServices.builder(AssistantUnique.class)
                .chatLanguageModel(chatLanguageModel)
                .streamingChatLanguageModel(streamingChatLanguageModel)
                .chatMemoryProvider(memoryId->MessageWindowChatMemory
                        .builder()
                        .maxMessages(10)
                        .id(memoryId)
                        .build())
                .build();
        return assistant;
    }

controller

@RestController
@RequestMapping("/ai")
public class ChatController {

    @Autowired
    private AssistantUnique assistantUnique;

    @RequestMapping("assistant/chat/unique")
    public String assistantChatUnique(@RequestParam Integer id,@RequestParam String message){
        return assistantUnique.chat(id,message);
    }
}
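
To see the isolation, chat under two different ids. The exchange below is only illustrative and assumes the assistantUnique bean defined above; the exact replies depend on the model:

        // id 1 introduces a name; id 2 shares no history with id 1
        assistantUnique.chat(1, "Hi, I'm Zhang San");
        assistantUnique.chat(1, "What is my name?");  // should recall "Zhang San"
        assistantUnique.chat(2, "What is my name?");  // id 2 has no such memory, so it cannot know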

Persisting conversation memory

The store object

public class PersistentChatMemoryStore implements ChatMemoryStore {
    // persist the messages with whatever fits your situation, e.g. Redis or MySQL

    @Override
    public List<ChatMessage> getMessages(Object memoryId) {
        // load the messages for this memoryId
        return List.of();
    }

    @Override
    public void updateMessages(Object memoryId, List<ChatMessage> messages) {
        // insert or update the messages for this memoryId
    }

    @Override
    public void deleteMessages(Object memoryId) {
        // delete the messages for this memoryId
    }
}
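
As a concrete sketch of what those methods might look like, the hypothetical InMemoryChatMemoryStore below keeps one message list per memoryId in a plain map (an assumption for illustration, not part of the original article; swapping the map for Redis or a database table follows the same shape, and the java.util imports are omitted as in the other snippets):

public class InMemoryChatMemoryStore implements ChatMemoryStore {
    // memoryId -> messages; replace the map with Redis or a database table in production
    private final Map<Object, List<ChatMessage>> store = new ConcurrentHashMap<>();

    @Override
    public List<ChatMessage> getMessages(Object memoryId) {
        return store.getOrDefault(memoryId, new ArrayList<>());
    }

    @Override
    public void updateMessages(Object memoryId, List<ChatMessage> messages) {
        store.put(memoryId, new ArrayList<>(messages));
    }

    @Override
    public void deleteMessages(Object memoryId) {
        store.remove(memoryId);
    }
}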

Use the store object

    @Bean
    public AssistantUnique assistantUniqueStore(ChatLanguageModel chatLanguageModel,
                                           StreamingChatLanguageModel streamingChatLanguageModel) {
        // the store that will back the chat memory
        ChatMemoryStore store = new PersistentChatMemoryStore();
        // create the proxy
        AssistantUnique assistant = AiServices.builder(AssistantUnique.class)
                .chatLanguageModel(chatLanguageModel)
                .streamingChatLanguageModel(streamingChatLanguageModel)
                .chatMemoryProvider(memoryId->MessageWindowChatMemory
                        .builder()
                        .maxMessages(10)
                        .id(memoryId)
                        .chatMemoryStore(store) // use the store
                        .build())
                .build();
        return assistant;
    }

Function calling (Tools)

Create the tools

@Service
public class ToolsService {

    @Tool("全国有多少个名字")//告诉ai怎么已配这个方法
    public Integer nameCount(@P("姓名")String name){//告诉ai需要提取的信息
        //业务
        System.out.println(name);
        return 112233;//结果
    }
}

Bind the tools

    @Bean
    public Assistant assistant(ChatLanguageModel chatLanguageModel,
                               StreamingChatLanguageModel streamingChatLanguageModel,
                               ToolsService toolsService) {

        MessageWindowChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(chatLanguageModel)
                .streamingChatLanguageModel(streamingChatLanguageModel)
                .chatMemory(chatMemory)
                .tools(toolsService) // bind the tools
                .build();

        return assistant;
    }
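
With the tool bound, a question that needs the count should, on a model that supports function calling, make the model call nameCount and build its answer around the return value. The exchange below only illustrates that flow:

        // the model decides to call nameCount("Zhang San"), receives 112233,
        // and then phrases its final reply around that number
        String answer = assistant.chat("How many people in the country are named Zhang San?");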

Preset role

A role is preset with the @SystemMessage annotation.

Set the role

public interface Assistant {
    // blocking response
    String chat(String message);
    // streaming response
    TokenStream stream(String message);

    @SystemMessage("""
            You are a customer service agent for an airline. Reply in a friendly, helpful manner.
            Before giving any information about booking or cancelling a booking, you must obtain the user's ID number and phone number.
            Please reply in Chinese.
            The current date is {{current_date}}.
            """)
    // preset role; @V fills template variables, @UserMessage marks the user message
    String chat(@UserMessage String message, @V("current_date") String currentDate);
}

tools

    @Tool("预定")
    public String reserve(@P("身份证") String idNumber,
                          @P("手机号") String cellNumber){
        //业务
        System.out.println(idNumber);
        System.out.println(cellNumber);
        return "预定成功";
    }

controller

    @RequestMapping("assistant/chat/role")
    public String assistantChatRole(@RequestParam String message){
        return assistant.chat(message, LocalDate.now().toString());
    }
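
In a typical run of this flow (illustrative only; the wording and the number of turns depend on the model, and it assumes the ToolsService with reserve is bound to the assistant):

        // the system message forbids booking until both pieces of information are collected
        assistant.chat("Help me book a flight", LocalDate.now().toString());
        // -> the model should first ask for the ID number and phone number
        assistant.chat("My ID number is 110101..., my phone is 138...", LocalDate.now().toString());
        // -> with both values the model calls reserve(...) and replies that the booking succeeded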

Structured output

The model's reply is automatically mapped into a Java object for us (roughly speaking, the AI service asks the model for output matching the method's return type and deserializes the reply).

This feature requires support from the model.

Entity class

@Data
public class User {
    private Integer id;
    private String username;
    private String age;
}

The service interface (proxy)

public interface UserService {
    User getUser(String message);
}

Configure the proxy

    @Bean
    public UserService userService(ChatLanguageModel model) {
        return AiServices.builder(UserService.class)
                .chatLanguageModel(model)
                .build();
    }

controller

    private final UserService userService; // injected through the constructor

    @GetMapping("/chat/json")
    public User json(@RequestParam String message){
        return userService.getUser(message);
    }
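
For example (purely illustrative; the exact output depends on the model), sending a message such as "I am Zhang San, my id is 1 and I am 18 years old" would be expected to come back as a User like {"id": 1, "username": "Zhang San", "age": "18"}.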

Multimodality

Several kinds of content, such as text and images, can be sent in a single message.

This requires support from the model.

controller

    @SneakyThrows
    @GetMapping("/chat/img")
    public String get(@RequestParam(defaultValue = "What is in the image?") String message, @RequestParam("img") MultipartFile img) {
        // "model" is the injected chat model; it must be vision-capable (e.g. a qwen-vl model)
        String base64Str = Base64.getEncoder().encodeToString(img.getBytes());
        UserMessage um = UserMessage.from(TextContent.from(message),
                ImageContent.from(base64Str, "image/png"));

        return model.chat(List.of(um)).aiMessage().text();
    }