Initial commit
This commit is contained in:
@@ -0,0 +1,13 @@
|
||||
package com.iweb.langchain4j;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
|
||||
@SpringBootApplication
|
||||
public class StreamApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(StreamApplication.class,args);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
package com.iweb.langchain4j.config;
|
||||
|
||||
import dev.langchain4j.model.chat.ChatModel;
|
||||
import dev.langchain4j.model.chat.StreamingChatModel;
|
||||
import dev.langchain4j.model.openai.OpenAiChatModel;
|
||||
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
@Configuration
|
||||
public class LLMConfig {
|
||||
|
||||
/**
|
||||
* @Description: 普通对话接口 ChatModel
|
||||
*/
|
||||
@Bean(name = "qwen")
|
||||
public ChatModel chatModelQwen()
|
||||
{
|
||||
return OpenAiChatModel.builder()
|
||||
.apiKey(System.getenv("DASH_SCOPE_API_KEY"))
|
||||
.modelName("qwen-plus")
|
||||
.baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
|
||||
.build();
|
||||
}
|
||||
/**
|
||||
* @Description: 流式对话接口 StreamingChatModel
|
||||
*/
|
||||
@Bean
|
||||
public StreamingChatModel streamingChatModel(){
|
||||
return OpenAiStreamingChatModel.builder()
|
||||
.apiKey(System.getenv("DASH_SCOPE_API_KEY"))
|
||||
.modelName("qwen-plus")
|
||||
.baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
|
||||
.build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
package com.iweb.langchain4j.controller;
|
||||
|
||||
import com.iweb.langchain4j.service.ChatAssistant;
|
||||
import dev.langchain4j.model.chat.StreamingChatModel;
|
||||
import dev.langchain4j.model.chat.response.ChatResponse;
|
||||
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
|
||||
import jakarta.annotation.Resource;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.RequestParam;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
@RestController
|
||||
@Slf4j
|
||||
public class StreamController {
|
||||
|
||||
@Resource //直接使用 low-level LLM API
|
||||
private StreamingChatModel chatModelQwen;
|
||||
@Resource //自己封装接口使用 high-level LLM API
|
||||
private ChatAssistant chatAssistant;
|
||||
|
||||
// http://localhost:9006/lc4j/chatstream/chat2?prompt=我是谁?
|
||||
@GetMapping(value = "/lc4j/chatstream/chat2")
|
||||
public Flux<String> chat3(@RequestParam(value = "prompt", defaultValue = "你是谁?") String prompt) {
|
||||
return chatAssistant.chatFlux(prompt);
|
||||
}
|
||||
|
||||
|
||||
// http://localhost:9006/lc4j/chatstream/chat?prompt=我是谁?
|
||||
@GetMapping(value = "/lc4j/chatstream/chat")
|
||||
public Flux<String> chat(@RequestParam("prompt") String prompt) {
|
||||
|
||||
return Flux.create(emitter -> {
|
||||
chatModelQwen.chat(prompt, new StreamingChatResponseHandler()
|
||||
{
|
||||
@Override
|
||||
public void onPartialResponse(String partialResponse)
|
||||
{
|
||||
emitter.next(partialResponse);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCompleteResponse(ChatResponse completeResponse)
|
||||
{
|
||||
emitter.complete();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(Throwable throwable)
|
||||
{
|
||||
emitter.error(throwable);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
package com.iweb.langchain4j.service;
|
||||
|
||||
import dev.langchain4j.service.spring.AiService;
|
||||
import dev.langchain4j.service.spring.AiServiceWiringMode;
|
||||
import reactor.core.publisher.Flux;
|
||||
|
||||
/**
|
||||
* 知识出处:
|
||||
* https://docs.langchain4j.dev/tutorials/spring-boot-integration/#spring-boot-starter-for-declarative-ai-services
|
||||
*/
|
||||
@AiService(wiringMode = AiServiceWiringMode.EXPLICIT
|
||||
,streamingChatModel = "streamingChatModel")
|
||||
public interface ChatAssistant {
|
||||
/**
|
||||
* 普通聊天
|
||||
* @param prompt
|
||||
* @return
|
||||
*/
|
||||
String chat(String prompt);
|
||||
|
||||
/**
|
||||
* 流式聊天
|
||||
* @param prompt
|
||||
* @return
|
||||
*/
|
||||
Flux<String> chatFlux(String prompt);
|
||||
}
|
||||
10
langchain4j-ai-stream/src/main/resources/application.yml
Normal file
10
langchain4j-ai-stream/src/main/resources/application.yml
Normal file
@@ -0,0 +1,10 @@
|
||||
server:
  port: 9006
  servlet:
    encoding:
      charset: utf-8
      enabled: true
      force: true # force UTF-8 on responses, so streamed output is not garbled
spring:
  application:
    name: langchain4j-ai-stream
|
||||
Reference in New Issue
Block a user