chore(deps): update dependency versions and restructure test package layout

- Bump JUnit Jupiter from 5.14.0 to 6.0.1
- Bump Spring from 6.2.12 to 6.2.13
- Bump MySQL Connector/J from 9.4.0 to 9.5.0
- Bump Spring AI from 1.0.3 to 1.1.0
- Bump Dify Java Client from 1.1.9 to 1.2.0
- Rename test class packages: adminbiz and executorbiz become openapi
- Replace System.out with SLF4J logging in FramelessApplicationTest
- Add JUnit Platform Launcher dependency (commented out)
- Update OllamaOptions class references to OllamaChatOptions
- Add a default input value and a default model option to the simple-chat and streaming-chat endpoints (see the sketch after this list)
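
A minimal Java sketch of the OllamaChatOptions migration pattern described by the last two bullets (class and method names here are illustrative placeholders, not code from the repository; the actual changes are in the hunks below):

    import org.springframework.ai.chat.client.ChatClient;
    import org.springframework.ai.ollama.OllamaChatModel;
    import org.springframework.ai.ollama.api.OllamaChatOptions;

    public class OllamaMigrationSketch {

        // Spring AI 1.1.x style: assign the model once via default options
        // instead of passing OllamaOptions on every call.
        static String chat(OllamaChatModel ollamaChatModel, String model, String systemPrompt, String input) {
            ChatClient chatClient = ChatClient
                    .builder(ollamaChatModel)
                    .defaultOptions(OllamaChatOptions.builder().model(model).build())   // assign model
                    .build();
            return chatClient
                    .prompt(systemPrompt)
                    .user(input)            // no per-call .options(OllamaOptions...) needed anymore
                    .call()
                    .content();
        }
    }
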
3.3.0-release
xuxueli 3 weeks ago
parent 852cae575e
commit 41354cfd35

@@ -32,17 +32,17 @@
<central-publishing-maven-plugin.version>0.9.0</central-publishing-maven-plugin.version>
<!-- base -->
<slf4j-api.version>2.0.17</slf4j-api.version>
-<junit-jupiter.version>5.14.0</junit-jupiter.version>
+<junit-jupiter.version>6.0.1</junit-jupiter.version>
<!-- jakarta.annotation-api -->
<jakarta.annotation-api.version>3.0.0</jakarta.annotation-api.version>
<!-- spring -->
<spring-boot.version>3.5.7</spring-boot.version>
-<spring.version>6.2.12</spring.version>
+<spring.version>6.2.13</spring.version>
<!-- mybatis & db -->
<mybatis-spring-boot-starter.version>3.0.5</mybatis-spring-boot-starter.version>
-<mysql-connector-j.version>9.4.0</mysql-connector-j.version>
+<mysql-connector-j.version>9.5.0</mysql-connector-j.version>
<!-- net -->
<netty.version>4.2.7.Final</netty.version>

@@ -1,4 +1,4 @@
-package com.xxl.job.adminbiz;
+package com.xxl.job.openapi;
import com.xxl.job.core.constant.RegistType;
import com.xxl.job.core.openapi.AdminBiz;

@@ -1,4 +1,4 @@
-package com.xxl.job.executorbiz;
+package com.xxl.job.openapi;
import com.xxl.job.core.constant.Const;
import com.xxl.job.core.openapi.ExecutorBiz;

@@ -31,6 +31,12 @@
<version>${junit-jupiter.version}</version>
<scope>test</scope>
</dependency>
+<!--<dependency>
+<groupId>org.junit.platform</groupId>
+<artifactId>junit-platform-launcher</artifactId>
+<version>${junit-jupiter.version}</version>
+<scope>test</scope>
+</dependency>-->
<!-- xxl-job-core -->
<dependency>
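
If the commented-out junit-platform-launcher dependency were enabled, it would allow running test classes programmatically rather than only through Maven Surefire. A rough sketch of that use case, assuming the dependency is uncommented (the selected class name below is a hypothetical example):

    import org.junit.platform.launcher.Launcher;
    import org.junit.platform.launcher.LauncherDiscoveryRequest;
    import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder;
    import org.junit.platform.launcher.core.LauncherFactory;
    import org.junit.platform.launcher.listeners.SummaryGeneratingListener;

    import static org.junit.platform.engine.discovery.DiscoverySelectors.selectClass;

    public class LauncherSketch {
        public static void main(String[] args) {
            // Discover tests in one class and run them via the JUnit Platform Launcher API.
            LauncherDiscoveryRequest request = LauncherDiscoveryRequestBuilder.request()
                    .selectors(selectClass("com.xxl.job.openapi.AdminBizTest"))   // hypothetical class name
                    .build();
            Launcher launcher = LauncherFactory.create();
            SummaryGeneratingListener listener = new SummaryGeneratingListener();
            launcher.registerTestExecutionListeners(listener);
            launcher.execute(request);
            listener.getSummary().printTo(new java.io.PrintWriter(System.out));
        }
    }
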

@@ -1,12 +1,21 @@
package com.xxl.job.executor.sample.frameless.test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
+import org.junit.platform.commons.annotation.Testable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+@Testable
public class FramelessApplicationTest {
+private static final Logger logger = LoggerFactory.getLogger(FramelessApplicationTest.class);
@Test
-public void test(){
-System.out.println("111");
+@DisplayName("test1")
+public void test1(){
+logger.info("111");
+Assertions.assertNull( null);
}
}
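
For readability, the resulting FramelessApplicationTest after this hunk should read roughly as follows (reconstructed from the diff above):

    package com.xxl.job.executor.sample.frameless.test;

    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.DisplayName;
    import org.junit.jupiter.api.Test;
    import org.junit.platform.commons.annotation.Testable;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    @Testable
    public class FramelessApplicationTest {

        private static final Logger logger = LoggerFactory.getLogger(FramelessApplicationTest.class);

        @Test
        @DisplayName("test1")
        public void test1() {
            // log via SLF4J instead of System.out
            logger.info("111");
            Assertions.assertNull(null);
        }
    }
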

@@ -16,8 +16,8 @@
<url>https://www.xuxueli.com/</url>
<properties>
-<spring-ai.version>1.0.3</spring-ai.version>
-<dify-java-client.version>1.1.9</dify-java-client.version>
+<spring-ai.version>1.1.0</spring-ai.version>
+<dify-java-client.version> 1.2.0</dify-java-client.version>
</properties>
<dependencyManagement>

@@ -18,7 +18,7 @@ import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
import org.springframework.ai.ollama.OllamaChatModel;
-import org.springframework.ai.ollama.api.OllamaOptions;
+import org.springframework.ai.ollama.api.OllamaChatOptions;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
@@ -57,20 +57,20 @@ public class IndexController {
*/
@GetMapping("/chat/simple")
@ResponseBody
-public String simpleChat(@RequestParam(value = "input") String input) {
+public String simpleChat(@RequestParam(value = "input", required = false, defaultValue = "介绍你自己") String input) {
// build chat-client
ChatClient ollamaChatClient = ChatClient
.builder(ollamaChatModel)
-.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
-.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build()) // add memory
+.defaultAdvisors(SimpleLoggerAdvisor.builder().build()) // add logger
+.defaultOptions(OllamaChatOptions.builder().model(modle).build()) // assign model
.build();
// call ollama
String response = ollamaChatClient
.prompt(prompt)
.user(input)
-.options(OllamaOptions.builder().model(modle).build())
.call()
.content();
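
After this hunk, simpleChat should read roughly as follows (reconstructed from the diff; ollamaChatModel, prompt and modle are fields of the surrounding controller that are not shown here):

    @GetMapping("/chat/simple")
    @ResponseBody
    public String simpleChat(@RequestParam(value = "input", required = false, defaultValue = "介绍你自己") String input) {
        // build chat-client
        ChatClient ollamaChatClient = ChatClient
                .builder(ollamaChatModel)
                .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())   // add memory
                .defaultAdvisors(SimpleLoggerAdvisor.builder().build())                                                 // add logger
                .defaultOptions(OllamaChatOptions.builder().model(modle).build())                                       // assign model
                .build();

        // call ollama (no per-call OllamaOptions anymore)
        String response = ollamaChatClient
                .prompt(prompt)
                .user(input)
                .call()
                .content();
        // ... remainder of the method unchanged
    }
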
@@ -82,7 +82,7 @@ public class IndexController {
* ChatClient
*/
@GetMapping("/chat/stream")
-public Flux<String> streamChat(HttpServletResponse response, @RequestParam(value = "input") String input) {
+public Flux<String> streamChat(HttpServletResponse response, @RequestParam(value = "input", required = false, defaultValue = "介绍你自己") String input) {
response.setCharacterEncoding("UTF-8");
// build chat-client
@@ -90,13 +90,13 @@ public class IndexController {
.builder(ollamaChatModel)
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+.defaultOptions(OllamaChatOptions.builder().model(modle).build())
.build();
// call ollama
return ollamaChatClient
.prompt(prompt)
.user(input)
-.options(OllamaOptions.builder().model(modle).build())
.stream()
.content();
}

@@ -14,7 +14,7 @@ import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
import org.springframework.ai.ollama.OllamaChatModel;
-import org.springframework.ai.ollama.api.OllamaOptions;
+import org.springframework.ai.ollama.api.OllamaChatOptions;
import org.springframework.stereotype.Component;
import java.util.HashMap;
@@ -86,13 +86,13 @@ public class AIXxlJob {
.builder(ollamaChatModel)
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+.defaultOptions(OllamaChatOptions.builder().model(ollamaParam.getModel()).build())
.build();
// call ollama
String response = ollamaChatClient
.prompt(ollamaParam.getPrompt())
.user(ollamaParam.getInput())
-.options(OllamaOptions.builder().model(ollamaParam.getModel()).build())
.call()
.content();
