diff --git a/doc/XXL-JOB官方文档.md b/doc/XXL-JOB官方文档.md
index dc43ba70..4de84280 100644
--- a/doc/XXL-JOB官方文档.md
+++ b/doc/XXL-JOB官方文档.md
@@ -1206,7 +1206,8 @@ public void demoJobHandler() throws Exception {
```
{
"input": "{输入信息,必填信息}",
- "prompt": "{模型prompt,可选信息}"
+ "prompt": "{模型prompt,可选信息}",
+ "model": "{模型实现,如qwen3:0.6b,可选信息}"
}
```
- b、difyWorkflowJobHandler:DifyWorkflow 任务,支持自定义inputs、user、baseUrl、apiKey 等输入信息,示例参数如下;
@@ -2543,7 +2544,9 @@ public void execute() {
### 7.40 版本 v3.2.0 Release Notes[规划中]
-- 1、【规划中】登录安全升级,密码加密处理算法从Md5改为Sha256;
+- 1、【强化】AI任务(ollamaJobHandler)优化:针对 “model” 模型配置信息,从执行器侧文件类配置调整至调度中心“任务参数”动态配置,支持集成多模型、并结合任务动态配置切换。
+- 2、【升级】升级多项maven依赖至较新版本,如 spring-ai、dify 等;
+- 3、【规划中】登录安全升级,密码加密处理算法从Md5改为Sha256;
```
// 1、用户表password字段需要调整长度,执行如下命令
ALTER TABLE xxl_conf_user
@@ -2555,7 +2558,6 @@ UPDATE xxl_conf_user t SET t.password = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5
- 2、【规划中】登录态持久化逻辑调整,简化代码逻辑;
- 3、【规划中】异常页面处理逻辑优化,新增兜底落地页配置;
- 4、【规划中】登录信息页面空值处理优化,避免空值影响ftl渲染;
-- 5、【规划中】升级多项maven依赖至较新版本,如 xxl-tool、gson、junit 等;
- 1、【规划中】登陆态Token生成逻辑优化,混淆登陆时间属性,降低token泄漏风险;
- 2、【规划中】组件扫描改为BeanPostProcessor方式,避免小概率情况下提前初始化;底层组件移除单例写法,汇总factory统一管理;
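
Editor's note on the release note above: the "model" value now travels in the scheduler-side 任务参数 JSON and is applied per call, rather than being fixed in the executor's configuration. The sketch below is illustrative only and is not part of this patch; it assumes the Spring AI 1.x Ollama client used elsewhere in this diff and mirrors the sample handler's fallback model (qwen3:0.6b).

```java
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;

// Illustrative sketch: the "model" field from the job parameter is applied per call via
// OllamaOptions, so one executor can switch between several locally installed Ollama models.
public class ModelPerTaskSketch {

    public static String chat(OllamaChatModel ollamaChatModel, String prompt, String input, String model) {
        // fall back to the sample's default model when the job parameter omits "model"
        String effectiveModel = (model == null || model.isBlank()) ? "qwen3:0.6b" : model;

        return ChatClient.builder(ollamaChatModel)
                .build()
                .prompt(prompt)
                .user(input)
                .options(OllamaOptions.builder().model(effectiveModel).build())
                .call()
                .content();
    }
}
```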
diff --git a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/pom.xml b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/pom.xml
index 80f0c320..280cb25c 100644
--- a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/pom.xml
+++ b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/pom.xml
@@ -16,8 +16,8 @@
https://www.xuxueli.com/
-		<spring-ai.version>1.0.0-M6</spring-ai.version>
-		<dify-client.version>1.0.7</dify-client.version>
+		<spring-ai.version>1.0.1</spring-ai.version>
+		<dify-client.version>1.1.7</dify-client.version>
@@ -54,7 +54,7 @@
			<groupId>org.springframework.ai</groupId>
-			<artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
+			<artifactId>spring-ai-starter-model-ollama</artifactId>
			<version>${spring-ai.version}</version>
@@ -81,9 +81,6 @@
-
- true
-
diff --git a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/config/XxlJobConfig.java b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/config/XxlJobConfig.java
index 2823a09e..1bc3dba2 100644
--- a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/config/XxlJobConfig.java
+++ b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/config/XxlJobConfig.java
@@ -3,12 +3,6 @@ package com.xxl.job.executor.config;
import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.ai.chat.client.ChatClient;
-import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
-import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
-import org.springframework.ai.chat.memory.InMemoryChatMemory;
-import org.springframework.ai.ollama.OllamaChatModel;
-import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -67,25 +61,4 @@ public class XxlJobConfig {
return xxlJobSpringExecutor;
}
- /**
- * ChatClient
- *
- * @param ollamaChatModel
- * @return
- * @throws Exception
- */
- @Bean
- public ChatClient chatClient(OllamaChatModel ollamaChatModel) throws Exception {
- // init ollamaiChatClient
- ChatClient ollamaiChatClient = ChatClient
- .builder(ollamaChatModel)
- .defaultAdvisors(new MessageChatMemoryAdvisor(new InMemoryChatMemory())) // 管理对话上下文记忆
- .defaultAdvisors(new SimpleLoggerAdvisor()) // 记录日志的Advisor,
- .defaultOptions(OllamaOptions.builder().topP(0.7).build()) // 设置ChatModel参数
- .build();
-
- return ollamaiChatClient;
- }
-
-
}
\ No newline at end of file
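
With the shared ChatClient @Bean removed above, the sample now builds a ChatClient from the auto-configured OllamaChatModel at the point of use. The snippet below is a minimal sketch (not part of this patch) of that construction, matching the Spring AI 1.x advisor builder chain used by the controller and job handler later in this diff.

```java
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
import org.springframework.ai.ollama.OllamaChatModel;

// Minimal sketch: per-use ChatClient construction replacing the removed @Bean.
public class ChatClientSketch {

    public static ChatClient build(OllamaChatModel ollamaChatModel) {
        return ChatClient.builder(ollamaChatModel)
                // conversation memory (MessageWindowChatMemory replaces the former InMemoryChatMemory)
                .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
                // request/response logging advisor
                .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
                .build();
    }
}
```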
diff --git a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/controller/IndexController.java b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/controller/IndexController.java
index 0a1981fb..ac3ab64e 100644
--- a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/controller/IndexController.java
+++ b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/controller/IndexController.java
@@ -14,7 +14,11 @@ import jakarta.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.client.ChatClient;
-import org.springframework.beans.factory.annotation.Value;
+import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
+import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
+import org.springframework.ai.chat.memory.MessageWindowChatMemory;
+import org.springframework.ai.ollama.OllamaChatModel;
+import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
@@ -24,7 +28,6 @@ import org.springframework.web.bind.annotation.ResponseBody;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
-import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
@@ -45,8 +48,9 @@ public class IndexController {
// --------------------------------- ollama chat ---------------------------------
@Resource
- private ChatClient chatClient;
- private static String prompt = "你好,你是一个研发工程师,擅长解决技术类问题。";
+ private OllamaChatModel ollamaChatModel;
+ private String prompt = "你好,你是一个研发工程师,擅长解决技术类问题。";
+ private String model = "qwen3:0.6b";
/**
* ChatClient 简单调用
@@ -54,13 +58,24 @@ public class IndexController {
@GetMapping("/chat/simple")
@ResponseBody
public String simpleChat(@RequestParam(value = "input") String input) {
- String result = chatClient
+
+ // build chat-client
+ ChatClient ollamaChatClient = ChatClient
+ .builder(ollamaChatModel)
+ .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
+ .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+ .build();
+
+ // call ollama
+ String response = ollamaChatClient
.prompt(prompt)
.user(input)
+ .options(OllamaOptions.builder().model(model).build())
.call()
.content();
- System.out.println("result: " + result);
- return result;
+
+ logger.info("result: " + response);
+ return response;
}
/**
@@ -69,9 +84,19 @@ public class IndexController {
@GetMapping("/chat/stream")
public Flux<String> streamChat(HttpServletResponse response, @RequestParam(value = "input") String input) {
response.setCharacterEncoding("UTF-8");
- return chatClient
+
+ // build chat-client
+ ChatClient ollamaChatClient = ChatClient
+ .builder(ollamaChatModel)
+ .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
+ .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+ .build();
+
+ // call ollama
+ return ollamaChatClient
.prompt(prompt)
.user(input)
+ .options(OllamaOptions.builder().model(model).build())
.stream()
.content();
}
@@ -85,7 +110,7 @@ public class IndexController {
@GetMapping("/dify/simple")
@ResponseBody
- public String difySimple(@RequestParam(required = false, value = "input") String input) throws IOException {
+ public String difySimple(@RequestParam(required = false, value = "input") String input) throws Exception {
Map<String, Object> inputs = new HashMap<>();
inputs.put("input", input);
@@ -167,7 +192,7 @@ public class IndexController {
sink.error(throwable);
}
});
- } catch (IOException e) {
+ } catch (Exception e) {
throw new RuntimeException(e);
}
}
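
A quick way to exercise the reworked endpoint is a plain GET against /chat/simple. The snippet below is a hypothetical smoke test, not part of this patch; the port 8081 and a locally running Ollama daemon are assumptions.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hypothetical smoke test for the sample controller; host and port are assumed defaults.
public class ChatSimpleSmokeTest {
    public static void main(String[] args) throws Exception {
        HttpRequest request = HttpRequest.newBuilder(
                URI.create("http://localhost:8081/chat/simple?input=hello")).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}
```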
diff --git a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/jobhandler/AIXxlJob.java b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/jobhandler/AIXxlJob.java
index b7957b32..6ae752aa 100644
--- a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/jobhandler/AIXxlJob.java
+++ b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/java/com/xxl/job/executor/jobhandler/AIXxlJob.java
@@ -10,6 +10,11 @@ import io.github.imfangs.dify.client.model.workflow.WorkflowRunRequest;
import io.github.imfangs.dify.client.model.workflow.WorkflowRunResponse;
import jakarta.annotation.Resource;
import org.springframework.ai.chat.client.ChatClient;
+import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
+import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
+import org.springframework.ai.chat.memory.MessageWindowChatMemory;
+import org.springframework.ai.ollama.OllamaChatModel;
+import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.stereotype.Component;
import java.util.HashMap;
@@ -23,8 +28,10 @@ import java.util.Map;
@Component
public class AIXxlJob {
+ // --------------------------------- ollama chat ---------------------------------
+
@Resource
- private ChatClient chatClient;
+ private OllamaChatModel ollamaChatModel;
/**
* 1、ollama Chat任务
@@ -38,7 +45,7 @@ public class AIXxlJob {
*
*/
@XxlJob("ollamaJobHandler")
- public void ollamaJobHandler() throws Exception {
+ public void ollamaJobHandler() {
// param
String param = XxlJobHelper.getJobParam();
@@ -53,15 +60,18 @@ public class AIXxlJob {
OllamaParam ollamaParam = null;
try {
ollamaParam = GsonTool.fromJson(param, OllamaParam.class);
- if (ollamaParam.getPrompt() == null) {
+ if (ollamaParam.getPrompt()==null || ollamaParam.getPrompt().isBlank()) {
ollamaParam.setPrompt("你是一个研发工程师,擅长解决技术类问题。");
}
- if (ollamaParam.getInput() == null || ollamaParam.getInput().trim().isEmpty()) {
+ if (ollamaParam.getInput() == null || ollamaParam.getInput().isBlank()) {
XxlJobHelper.log("input is empty.");
XxlJobHelper.handleFail();
return;
}
+ if (ollamaParam.getModel()==null || ollamaParam.getModel().isBlank()) {
+ ollamaParam.setModel("qwen3:0.6b");
+ }
} catch (Exception e) {
XxlJobHelper.log(new RuntimeException("OllamaParam parse error", e));
XxlJobHelper.handleFail();
@@ -71,18 +81,28 @@ public class AIXxlJob {
// input
XxlJobHelper.log("
【Input】: " + ollamaParam.getInput()+ "
");
- // invoke
- String result = chatClient
+ // build chat-client
+ ChatClient ollamaChatClient = ChatClient
+ .builder(ollamaChatModel)
+ .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
+ .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
+ .build();
+
+ // call ollama
+ String response = ollamaChatClient
.prompt(ollamaParam.getPrompt())
.user(ollamaParam.getInput())
+ .options(OllamaOptions.builder().model(ollamaParam.getModel()).build())
.call()
.content();
- XxlJobHelper.log("
【Output】: " + result+ "
");
+
+ XxlJobHelper.log("
【Output】: " + response + "
");
}
private static class OllamaParam{
private String input;
private String prompt;
+ private String model;
public String getInput() {
return input;
@@ -99,9 +119,19 @@ public class AIXxlJob {
public void setPrompt(String prompt) {
this.prompt = prompt;
}
+
+ public String getModel() {
+ return model;
+ }
+
+ public void setModel(String model) {
+ this.model = model;
+ }
}
+ // --------------------------------- dify workflow ---------------------------------
+
/**
* 2、dify Workflow任务
*
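
For completeness, here are hypothetical 调度中心 "任务参数" values for ollamaJobHandler after this change (illustrative only, not part of the patch): only input is required, while prompt and model fall back to the defaults set in the handler above.

```java
// Hypothetical job-parameter values; field names follow the OllamaParam DTO in AIXxlJob.
public class OllamaJobParamExamples {
    // all fields supplied explicitly
    static final String FULL_PARAM =
            "{\"input\":\"介绍一下XXL-JOB\",\"prompt\":\"你是一个研发工程师\",\"model\":\"qwen3:0.6b\"}";
    // minimal form: prompt and model omitted, handler defaults apply
    static final String MINIMAL_PARAM =
            "{\"input\":\"介绍一下XXL-JOB\"}";
}
```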
diff --git a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/resources/application.properties b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/resources/application.properties
index 103600f9..27d83309 100644
--- a/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/resources/application.properties
+++ b/xxl-job-executor-samples/xxl-job-executor-sample-springboot-ai/src/main/resources/application.properties
@@ -31,9 +31,6 @@ xxl.job.executor.logretentiondays=30
### ollama
+spring.ai.model.chat=ollama
+### ollama url
spring.ai.ollama.base-url=http://localhost:11434
-### chat enabled
-spring.ai.ollama.chat.enabled=true
-### chat model,must install it locally through ollama
-spring.ai.ollama.chat.options.model=qwen2.5:1.5b
-spring.ai.ollama.chat.options.temperature=0.8
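
The trimmed properties above rely on Spring AI's model auto-configuration: spring.ai.model.chat=ollama activates the Ollama chat model, spring.ai.ollama.base-url points it at the local daemon, and the concrete model name is no longer pinned here but supplied per call by the handlers above. A minimal sketch of the only Spring AI bean the executor still injects; the holder class name is illustrative.

```java
import jakarta.annotation.Resource;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.stereotype.Component;

// Minimal sketch: with spring.ai.model.chat=ollama, the starter auto-configures OllamaChatModel
// from spring.ai.ollama.base-url; the model name itself now travels in the job parameter.
@Component
public class OllamaChatModelHolder {

    @Resource
    private OllamaChatModel ollamaChatModel;

    public OllamaChatModel get() {
        return ollamaChatModel;
    }
}
```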