Initial commit

userpu committed 2025-12-17 19:47:14 +08:00
commit 0d15e20780
119 changed files with 3582 additions and 0 deletions

src/main/java/com/iwe3/langchain4j/ModelParamsApplication.java

@@ -0,0 +1,12 @@
package com.iwe3.langchain4j;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class ModelParamsApplication {

    public static void main(String[] args) {
        SpringApplication.run(ModelParamsApplication.class, args);
    }
}

src/main/java/com/iwe3/langchain4j/config/LLMConfig.java

@@ -0,0 +1,45 @@
package com.iwe3.langchain4j.config;

import com.iwe3.langchain4j.listener.UserpuChatModelListener;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.time.Duration;
import java.util.List;

@Configuration
public class LLMConfig {

    @Bean(name = "qwen")
    public ChatModel chatModelQwen() {
        // The three essential model settings: apiKey, modelName, baseUrl
        return OpenAiChatModel.builder()
                .apiKey(System.getenv("DASH_SCOPE_API_KEY"))
                .modelName("qwen-plus")
                .baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
                .logRequests(true)
                .logResponses(true)
                .listeners(List.of(new UserpuChatModelListener()))
                .maxRetries(3)
                .timeout(Duration.ofSeconds(5))
                .build();
    }

    /**
     * Reference: https://api-docs.deepseek.com/zh-cn/
     */
    @Bean(name = "deepseek")
    public ChatModel chatModelDeepSeek() {
        return OpenAiChatModel.builder()
                .apiKey(System.getenv("DEEP_SEEK_API_KEY"))
                .modelName("deepseek-chat")
                //.modelName("deepseek-reasoner")
                .baseUrl("https://api.deepseek.com/v1")
                .build();
    }
}
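
The logging, listener, retry, and timeout settings applied to the qwen bean could also be attached to the DeepSeek builder. A minimal sketch of that variant, written as an extra method inside LLMConfig; it is not part of this commit and the bean name deepseekWithListener is illustrative:

    @Bean(name = "deepseekWithListener") // hypothetical bean name, not in this commit
    public ChatModel chatModelDeepSeekWithListener() {
        // Same builder options already used for the qwen bean above.
        return OpenAiChatModel.builder()
                .apiKey(System.getenv("DEEP_SEEK_API_KEY"))
                .modelName("deepseek-chat")
                .baseUrl("https://api.deepseek.com/v1")
                .logRequests(true)
                .logResponses(true)
                .listeners(List.of(new UserpuChatModelListener()))
                .maxRetries(3)
                .timeout(Duration.ofSeconds(5))
                .build();
    }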

src/main/java/com/iwe3/langchain4j/controller/ModelParameterController.java

@@ -0,0 +1,27 @@
package com.iwe3.langchain4j.controller;

import dev.langchain4j.model.chat.ChatModel;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@Slf4j
public class ModelParameterController {

    @Resource(name = "qwen")
    private ChatModel chatModelQwen;

    // Example: http://localhost:9004/lc4j/modelparam/config?prompt=我是谁
    @GetMapping(value = "/lc4j/modelparam/config")
    public String config(@RequestParam(value = "prompt", defaultValue = "你是谁") String prompt) {
        var result = chatModelQwen.chat(prompt);
        log.info("Result returned by the model via LangChain4j: {}", result);
        return result;
    }
}
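
Because LLMConfig also registers a bean named "deepseek", a second controller could select it by name in exactly the same way. A minimal sketch, not part of this commit; the class name DeepSeekDemoController and the /lc4j/modelparam/deepseek path are illustrative:

package com.iwe3.langchain4j.controller;

import dev.langchain4j.model.chat.ChatModel;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

// Illustrative sketch, not in this commit: selects the "deepseek" bean by name.
@RestController
@Slf4j
public class DeepSeekDemoController {

    @Resource(name = "deepseek")
    private ChatModel chatModelDeepSeek;

    @GetMapping(value = "/lc4j/modelparam/deepseek") // hypothetical path
    public String deepseek(@RequestParam(value = "prompt", defaultValue = "你是谁") String prompt) {
        var result = chatModelDeepSeek.chat(prompt);
        log.info("Result returned by the deepseek bean via LangChain4j: {}", result);
        return result;
    }
}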

src/main/java/com/iwe3/langchain4j/listener/UserpuChatModelListener.java

@@ -0,0 +1,36 @@
package com.iwe3.langchain4j.listener;

import cn.hutool.core.util.IdUtil;
import dev.langchain4j.model.chat.listener.ChatModelErrorContext;
import dev.langchain4j.model.chat.listener.ChatModelListener;
import dev.langchain4j.model.chat.listener.ChatModelRequestContext;
import dev.langchain4j.model.chat.listener.ChatModelResponseContext;
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class UserpuChatModelListener implements ChatModelListener {

    @Override
    public void onRequest(ChatModelRequestContext requestContext) {
        // Key-value pairs put into attributes() during onRequest are visible again in
        // onResponse, so the attribute map is a convenient way to pass context
        // (here, a trace id) between the two callbacks.
        String uuidValue = IdUtil.simpleUUID();
        requestContext.attributes().put("TraceID", uuidValue);
        log.info("onRequest requestContext: {}\tTraceID: {}", requestContext, uuidValue);
    }

    @Override
    public void onResponse(ChatModelResponseContext responseContext) {
        Object object = responseContext.attributes().get("TraceID");
        log.info("onResponse TraceID: {}", object);
    }

    @Override
    public void onError(ChatModelErrorContext errorContext) {
        log.error("onError ChatModelErrorContext: {}", errorContext);
    }
}

src/main/resources/application.yml

@@ -0,0 +1,9 @@
server:
  port: 9004
spring:
  application:
    name: langchain4j-ai-model-params
logging:
  level:
    dev:
      langchain4j: debug
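
To confirm the wiring end to end, a smoke test could inject the "qwen" bean and issue one chat call. A minimal sketch, assuming spring-boot-starter-test is on the test classpath and DASH_SCOPE_API_KEY is set in the environment; the test class name is illustrative and not part of this commit:

package com.iwe3.langchain4j;

import dev.langchain4j.model.chat.ChatModel;
import jakarta.annotation.Resource;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;

// Hypothetical smoke test, not part of this commit.
@SpringBootTest
class ModelParamsApplicationSmokeTest {

    @Resource(name = "qwen")
    private ChatModel chatModelQwen;

    @Test
    void qwenBeanAnswersAPrompt() {
        // Makes a real call to DashScope, so DASH_SCOPE_API_KEY must be set at runtime.
        String answer = chatModelQwen.chat("Reply with the single word: pong");
        Assertions.assertNotNull(answer);
    }
}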