【强化】AI任务(ollamaJobHandler)优化:针对 “model” 模型配置信息,从执行器侧文件类配置调整至调度中心“任务参数”动态配置,支持集成多模型、并结合任务动态配置切换。

3.2.0-release
xuxueli 4 weeks ago
parent 3cffd2f5ae
commit 05c0f83fc0

@ -1206,7 +1206,8 @@ public void demoJobHandler() throws Exception {
``` ```
{ {
"input": "{输入信息,必填信息}", "input": "{输入信息,必填信息}",
"prompt": "{模型prompt,可选信息}" "prompt": "{模型prompt,可选信息}",
"model": "{模型实现,如qwen3:0.6b,可选信息}"
} }
``` ```
- b、difyWorkflowJobHandler:Dify Workflow 任务支持自定义inputs、user、baseUrl、apiKey 等输入信息,示例参数如下; - b、difyWorkflowJobHandler:Dify Workflow 任务支持自定义inputs、user、baseUrl、apiKey 等输入信息,示例参数如下;
@ -2543,7 +2544,9 @@ public void execute() {
### 7.40 版本 v3.2.0 Release Notes[规划中] ### 7.40 版本 v3.2.0 Release Notes[规划中]
- 1、【规划中】登录安全升级密码加密处理算法从Md5改为Sha256 - 1、【强化】AI任务(ollamaJobHandler)优化:针对 “model” 模型配置信息,从执行器侧文件类配置调整至调度中心“任务参数”动态配置,支持集成多模型、并结合任务动态配置切换。
- 2、【升级】升级多项maven依赖至较新版本如 spring-ai、dify 等;
- 3、【规划中】登录安全升级密码加密处理算法从Md5改为Sha256
``` ```
// 1、用户表password字段需要调整长度执行如下命令 // 1、用户表password字段需要调整长度执行如下命令
ALTER TABLE xxl_conf_user ALTER TABLE xxl_conf_user
@ -2555,7 +2558,6 @@ UPDATE xxl_conf_user t SET t.password = '8d969eef6ecad3c29a3a629280e686cf0c3f5d5
- 2、【规划中】登录态持久化逻辑调整简化代码逻辑 - 2、【规划中】登录态持久化逻辑调整简化代码逻辑
- 3、【规划中】异常页面处理逻辑优化新增兜底落地页配置 - 3、【规划中】异常页面处理逻辑优化新增兜底落地页配置
- 4、【规划中】登录信息页面空值处理优化避免空值影响ftl渲染 - 4、【规划中】登录信息页面空值处理优化避免空值影响ftl渲染
- 5、【规划中】升级多项maven依赖至较新版本如 xxl-tool、gson、junit 等;
- 1、【规划中】登陆态Token生成逻辑优化混淆登陆时间属性降低token泄漏风险 - 1、【规划中】登陆态Token生成逻辑优化混淆登陆时间属性降低token泄漏风险
- 2、【规划中】组件扫描改为BeanPostProcessor方式避免小概率情况下提前初始化底层组件移除单例写法汇总factory统一管理 - 2、【规划中】组件扫描改为BeanPostProcessor方式避免小概率情况下提前初始化底层组件移除单例写法汇总factory统一管理

@ -16,8 +16,8 @@
<url>https://www.xuxueli.com/</url> <url>https://www.xuxueli.com/</url>
<properties> <properties>
<spring-ai.version>1.0.0-M6</spring-ai.version> <spring-ai.version>1.0.1</spring-ai.version>
<dify-java-client.version>1.0.7</dify-java-client.version> <dify-java-client.version>1.1.7</dify-java-client.version>
</properties> </properties>
<dependencyManagement> <dependencyManagement>
@ -54,7 +54,7 @@
<!-- spring-ai --> <!-- spring-ai -->
<dependency> <dependency>
<groupId>org.springframework.ai</groupId> <groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-ollama-spring-boot-starter</artifactId> <artifactId>spring-ai-starter-model-ollama</artifactId>
<version>${spring-ai.version}</version> <version>${spring-ai.version}</version>
</dependency> </dependency>
@ -81,9 +81,6 @@
</goals> </goals>
</execution> </execution>
</executions> </executions>
<configuration>
<fork>true</fork>
</configuration>
</plugin> </plugin>
</plugins> </plugins>
</build> </build>

@ -3,12 +3,6 @@ package com.xxl.job.executor.config;
import com.xxl.job.core.executor.impl.XxlJobSpringExecutor; import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.InMemoryChatMemory;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Configuration;
@ -67,25 +61,4 @@ public class XxlJobConfig {
return xxlJobSpringExecutor; return xxlJobSpringExecutor;
} }
/**
* ChatClient
*
* @param ollamaChatModel
* @return
* @throws Exception
*/
@Bean
public ChatClient chatClient(OllamaChatModel ollamaChatModel) throws Exception {
// init ollamaiChatClient
ChatClient ollamaiChatClient = ChatClient
.builder(ollamaChatModel)
.defaultAdvisors(new MessageChatMemoryAdvisor(new InMemoryChatMemory())) // 管理对话上下文记忆
.defaultAdvisors(new SimpleLoggerAdvisor()) // 记录日志的Advisor
.defaultOptions(OllamaOptions.builder().topP(0.7).build()) // 设置ChatModel参数
.build();
return ollamaiChatClient;
}
} }

@ -14,7 +14,11 @@ import jakarta.servlet.http.HttpServletResponse;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.client.ChatClient; import org.springframework.ai.chat.client.ChatClient;
import org.springframework.beans.factory.annotation.Value; import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.stereotype.Controller; import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
@ -24,7 +28,6 @@ import org.springframework.web.bind.annotation.ResponseBody;
import reactor.core.publisher.Flux; import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink; import reactor.core.publisher.FluxSink;
import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.function.Consumer; import java.util.function.Consumer;
@ -45,8 +48,9 @@ public class IndexController {
// --------------------------------- ollama chat --------------------------------- // --------------------------------- ollama chat ---------------------------------
@Resource @Resource
private ChatClient chatClient; private OllamaChatModel ollamaChatModel;
private static String prompt = "你好,你是一个研发工程师,擅长解决技术类问题。"; private String prompt = "你好,你是一个研发工程师,擅长解决技术类问题。";
private String modle = "qwen3:0.6b";
/** /**
* ChatClient * ChatClient
@ -54,13 +58,24 @@ public class IndexController {
@GetMapping("/chat/simple") @GetMapping("/chat/simple")
@ResponseBody @ResponseBody
public String simpleChat(@RequestParam(value = "input") String input) { public String simpleChat(@RequestParam(value = "input") String input) {
String result = chatClient
// build chat-client
ChatClient ollamaChatClient = ChatClient
.builder(ollamaChatModel)
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
.build();
// call ollama
String response = ollamaChatClient
.prompt(prompt) .prompt(prompt)
.user(input) .user(input)
.options(OllamaOptions.builder().model(modle).build())
.call() .call()
.content(); .content();
System.out.println("result: " + result);
return result; logger.info("result: " + response);
return response;
} }
/** /**
@ -69,9 +84,19 @@ public class IndexController {
@GetMapping("/chat/stream") @GetMapping("/chat/stream")
public Flux<String> streamChat(HttpServletResponse response, @RequestParam(value = "input") String input) { public Flux<String> streamChat(HttpServletResponse response, @RequestParam(value = "input") String input) {
response.setCharacterEncoding("UTF-8"); response.setCharacterEncoding("UTF-8");
return chatClient
// build chat-client
ChatClient ollamaChatClient = ChatClient
.builder(ollamaChatModel)
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
.build();
// call ollama
return ollamaChatClient
.prompt(prompt) .prompt(prompt)
.user(input) .user(input)
.options(OllamaOptions.builder().model(modle).build())
.stream() .stream()
.content(); .content();
} }
@ -85,7 +110,7 @@ public class IndexController {
@GetMapping("/dify/simple") @GetMapping("/dify/simple")
@ResponseBody @ResponseBody
public String difySimple(@RequestParam(required = false, value = "input") String input) throws IOException { public String difySimple(@RequestParam(required = false, value = "input") String input) throws Exception {
Map<String, Object> inputs = new HashMap<>(); Map<String, Object> inputs = new HashMap<>();
inputs.put("input", input); inputs.put("input", input);
@ -167,7 +192,7 @@ public class IndexController {
sink.error(throwable); sink.error(throwable);
} }
}); });
} catch (IOException e) { } catch (Exception e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
} }

@ -10,6 +10,11 @@ import io.github.imfangs.dify.client.model.workflow.WorkflowRunRequest;
import io.github.imfangs.dify.client.model.workflow.WorkflowRunResponse; import io.github.imfangs.dify.client.model.workflow.WorkflowRunResponse;
import jakarta.annotation.Resource; import jakarta.annotation.Resource;
import org.springframework.ai.chat.client.ChatClient; import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.MessageChatMemoryAdvisor;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.memory.MessageWindowChatMemory;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.HashMap; import java.util.HashMap;
@ -23,8 +28,10 @@ import java.util.Map;
@Component @Component
public class AIXxlJob { public class AIXxlJob {
// --------------------------------- ollama chat ---------------------------------
@Resource @Resource
private ChatClient chatClient; private OllamaChatModel ollamaChatModel;
/** /**
* 1、ollama Chat * 1、ollama Chat
@ -38,7 +45,7 @@ public class AIXxlJob {
* </pre> * </pre>
*/ */
@XxlJob("ollamaJobHandler") @XxlJob("ollamaJobHandler")
public void ollamaJobHandler() throws Exception { public void ollamaJobHandler() {
// param // param
String param = XxlJobHelper.getJobParam(); String param = XxlJobHelper.getJobParam();
@ -53,15 +60,18 @@ public class AIXxlJob {
OllamaParam ollamaParam = null; OllamaParam ollamaParam = null;
try { try {
ollamaParam = GsonTool.fromJson(param, OllamaParam.class); ollamaParam = GsonTool.fromJson(param, OllamaParam.class);
if (ollamaParam.getPrompt() == null) { if (ollamaParam.getPrompt()==null || ollamaParam.getPrompt().isBlank()) {
ollamaParam.setPrompt("你是一个研发工程师,擅长解决技术类问题。"); ollamaParam.setPrompt("你是一个研发工程师,擅长解决技术类问题。");
} }
if (ollamaParam.getInput() == null || ollamaParam.getInput().trim().isEmpty()) { if (ollamaParam.getInput() == null || ollamaParam.getInput().isBlank()) {
XxlJobHelper.log("input is empty."); XxlJobHelper.log("input is empty.");
XxlJobHelper.handleFail(); XxlJobHelper.handleFail();
return; return;
} }
if (ollamaParam.getModel()==null || ollamaParam.getModel().isBlank()) {
ollamaParam.setModel("qwen3:0.6b");
}
} catch (Exception e) { } catch (Exception e) {
XxlJobHelper.log(new RuntimeException("OllamaParam parse error", e)); XxlJobHelper.log(new RuntimeException("OllamaParam parse error", e));
XxlJobHelper.handleFail(); XxlJobHelper.handleFail();
@ -71,18 +81,28 @@ public class AIXxlJob {
// input // input
XxlJobHelper.log("<br><br><b>【Input】: " + ollamaParam.getInput()+ "</b><br><br>"); XxlJobHelper.log("<br><br><b>【Input】: " + ollamaParam.getInput()+ "</b><br><br>");
// invoke // build chat-client
String result = chatClient ChatClient ollamaChatClient = ChatClient
.builder(ollamaChatModel)
.defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
.defaultAdvisors(SimpleLoggerAdvisor.builder().build())
.build();
// call ollama
String response = ollamaChatClient
.prompt(ollamaParam.getPrompt()) .prompt(ollamaParam.getPrompt())
.user(ollamaParam.getInput()) .user(ollamaParam.getInput())
.options(OllamaOptions.builder().model(ollamaParam.getModel()).build())
.call() .call()
.content(); .content();
XxlJobHelper.log("<br><br><b>【Output】: " + result+ "</b><br><br>");
XxlJobHelper.log("<br><br><b>【Output】: " + response + "</b><br><br>");
} }
private static class OllamaParam{ private static class OllamaParam{
private String input; private String input;
private String prompt; private String prompt;
private String model;
public String getInput() { public String getInput() {
return input; return input;
@ -99,9 +119,19 @@ public class AIXxlJob {
public void setPrompt(String prompt) { public void setPrompt(String prompt) {
this.prompt = prompt; this.prompt = prompt;
} }
public String getModel() {
return model;
}
public void setModel(String model) {
this.model = model;
}
} }
// --------------------------------- dify workflow ---------------------------------
/** /**
* 2、dify Workflow * 2、dify Workflow
* *

@ -31,9 +31,6 @@ xxl.job.executor.logretentiondays=30
### ollama ### ollama
spring.ai.model.chat=ollama
### ollama url
spring.ai.ollama.base-url=http://localhost:11434 spring.ai.ollama.base-url=http://localhost:11434
### chat enabled
spring.ai.ollama.chat.enabled=true
### chat model(must install it locally through ollama)
spring.ai.ollama.chat.options.model=qwen2.5:1.5b
spring.ai.ollama.chat.options.temperature=0.8

Loading…
Cancel
Save