3.3.1-release
xuxueli 4 months ago
parent b48a68a07a
commit 166c28608c

@ -0,0 +1,51 @@
package com.xxl.job.executor.test.dify;
import com.xxl.job.core.executor.impl.XxlJobSpringExecutor;
import io.github.imfangs.dify.client.DifyClientFactory;
import io.github.imfangs.dify.client.DifyWorkflowClient;
import io.github.imfangs.dify.client.enums.ResponseMode;
import io.github.imfangs.dify.client.model.workflow.WorkflowRunRequest;
import io.github.imfangs.dify.client.model.workflow.WorkflowRunResponse;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import java.util.Map;
@SpringBootTest
public class DifyTest {
    private static final Logger logger = LoggerFactory.getLogger(DifyTest.class);

    // Mocked so the real XXL-Job executor does not start inside this test context.
    @MockitoBean
    private XxlJobSpringExecutor xxlJobSpringExecutor;

    /**
     * Invokes a Dify workflow in blocking mode and logs the workflow outputs.
     *
     * <p>NOTE(review): {@code baseUrl}/{@code apiKey} are placeholders — fill in a
     * real endpoint and key before running this against a live Dify instance.
     *
     * @throws Exception if the workflow invocation fails
     */
    @Test
    public void test() throws Exception {
        String baseUrl = "https://xx.ai";
        String apiKey = "xx";
        String user = "zhangsan";
        Map<String, Object> inputs = Map.of(
                "input", "请写一个java程序实现一个方法输入一个字符串返回字符串的长度。"
        );

        // dify request: BLOCKING mode returns the full result in one response
        WorkflowRunRequest request = WorkflowRunRequest.builder()
                .inputs(inputs)
                .responseMode(ResponseMode.BLOCKING)
                .user(user)
                .build();

        // dify invoke
        DifyWorkflowClient workflowClient = DifyClientFactory.createWorkflowClient(baseUrl, apiKey);
        WorkflowRunResponse response = workflowClient.runWorkflow(request);

        // response — parameterized SLF4J logging instead of eager string concatenation
        logger.info("input: {}", inputs);
        logger.info("output: {}", response.getData().getOutputs());
    }
}

@ -13,6 +13,9 @@ import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaChatOptions;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.bean.override.mockito.MockitoBean;
import reactor.core.publisher.Flux;
import java.util.concurrent.TimeUnit;
@SpringBootTest
public class OllamaTest {
@ -27,7 +30,7 @@ public class OllamaTest {
private OllamaChatModel ollamaChatModel;
@Test
public void test() {
public void chatTest() {
String model = "qwen3:0.6b";
String prompt = "背景说明:你是一个研发工程师,擅长解决技术类问题。";
@ -53,5 +56,39 @@ public class OllamaTest {
logger.info("response: {}", response);
}
/**
 * Streams a chat completion from Ollama and logs each emitted chunk.
 *
 * <p>Instead of sleeping a fixed 10 seconds (slow on success, flaky on slow
 * responses), a latch is released on stream completion or error and awaited
 * with an upper bound, so the test finishes as soon as the stream does.
 *
 * @throws InterruptedException if the await is interrupted
 */
@Test
public void chatStreamTest() throws InterruptedException {
    String model = "qwen3:0.6b";
    String prompt = "背景说明:你是一个研发工程师,擅长解决技术类问题。";
    String input = "请写一个java程序实现一个方法输入一个字符串返回字符串的长度。";

    // build chat-client
    ChatClient ollamaChatClient = ChatClient
            .builder(ollamaChatModel)
            .defaultAdvisors(MessageChatMemoryAdvisor.builder(MessageWindowChatMemory.builder().build()).build())
            .defaultAdvisors(SimpleLoggerAdvisor.builder().build())
            .defaultOptions(OllamaChatOptions.builder().model(model).build())
            .build();

    // call ollama (streaming)
    logger.info("input: {}", input);
    Flux<String> flux = ollamaChatClient
            .prompt(prompt)
            .user(input)
            .stream()
            .content();

    // Fully-qualified to avoid touching the import block; released on terminal signal.
    java.util.concurrent.CountDownLatch done = new java.util.concurrent.CountDownLatch(1);
    flux.subscribe(
            data -> logger.info("Received: {}", data),                      // onNext
            error -> { logger.error("stream error", error); done.countDown(); }, // onError
            () -> { logger.info("Completed"); done.countDown(); }           // onComplete
    );
    // Upper bound so a hung stream cannot block the build indefinitely.
    if (!done.await(60, TimeUnit.SECONDS)) {
        logger.warn("stream did not complete within 60s");
    }
}
}

Loading…
Cancel
Save