feat: open-ai

This commit is contained in:
2026-01-18 17:59:10 +08:00
parent 24d189f945
commit a7c13bc449
7 changed files with 171 additions and 16 deletions

View File

@@ -23,10 +23,10 @@
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.springframework.ai</groupId>-->
<!-- <artifactId>spring-ai-openai-spring-boot-starter</artifactId>-->
<!-- </dependency>-->
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-openai-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-tika-document-reader</artifactId>

View File

@@ -2,17 +2,30 @@ package com.storm.dev.trigger.http;
import com.storm.dev.api.IAiService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.document.Document;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.ai.vectorstore.PgVectorStore;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author: lyd
* @date: 2025/6/7 22:18
*/
@Slf4j
@RestController()
@CrossOrigin("*")
@RequestMapping("/api/v1/ollama/")
@@ -20,6 +33,8 @@ public class OllamaController implements IAiService {
@Resource
private OllamaChatClient chatClient;
@Resource
private PgVectorStore pgVectorStore;
/**
* http://localhost:8090/api/v1/ollama/generate?model=deepseek-r1:7b&message=1+1
@@ -38,4 +53,33 @@ public class OllamaController implements IAiService {
public Flux<ChatResponse> generateStream(@RequestParam String model, @RequestParam String message) {
    // Stream chat completions from the user-selected Ollama model.
    OllamaOptions options = OllamaOptions.create().withModel(model);
    Prompt prompt = new Prompt(message, options);
    return chatClient.stream(prompt);
}
@Override
@RequestMapping(value = "generate_stream_rag", method = RequestMethod.GET)
public Flux<ChatResponse> generateStreamRag(@RequestParam String model, @RequestParam String ragTag, @RequestParam String message) {
    log.info("用户选择模型:{},知识库:{},提问问题:{}", model, ragTag, message);
    // Prompt template: ground the answer in the retrieved DOCUMENTS section
    // and require the reply to be in Chinese.
    String systemPrompt = """
            Use the information from the DOCUMENTS section to provide accurate answers but act as if you knew this information innately.
            If unsure, simply state that you don't know.
            Another thing you need to note is that your reply must be in Chinese!
            DOCUMENTS:
                {documents}
            """;
    // Similarity search, restricted to the chosen knowledge base.
    // NOTE(review): ragTag is concatenated straight into the filter expression;
    // a tag containing a quote would break or alter the filter — consider
    // FilterExpressionBuilder. Left unchanged here to preserve behavior.
    SearchRequest request = SearchRequest.query(message).withTopK(5).withFilterExpression("knowledge == '" + ragTag + "'");
    List<Document> documents = pgVectorStore.similaritySearch(request);
    String documentsCollectors = documents.stream().map(Document::getContent).collect(Collectors.joining());
    // Render the retrieved context into a system message.
    Message ragMessage = new SystemPromptTemplate(systemPrompt).createMessage(Map.of("documents", documentsCollectors));
    // Program to the List interface (was ArrayList) — consistent with OpenAiController.
    List<Message> messages = new ArrayList<>();
    messages.add(new UserMessage(message));
    messages.add(ragMessage);
    // Stream the RAG-augmented completion (removed redundant local before return).
    return chatClient.stream(new Prompt(messages, OllamaOptions.create().withModel(model)));
}
}

View File

@@ -0,0 +1,86 @@
package com.storm.dev.trigger.http;
import com.storm.dev.api.IAiService;
import jakarta.annotation.Resource;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.document.Document;
import org.springframework.ai.openai.OpenAiChatClient;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.vectorstore.PgVectorStore;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * OpenAI-backed implementation of {@link IAiService}: plain and streaming chat
 * completions plus a RAG variant that grounds answers in a PgVector knowledge base.
 *
 * @author: lyd
 * @date: 2026/1/18 17:08
 */
@RestController()
@CrossOrigin("*")
@RequestMapping("/api/v1/openai/")
public class OpenAiController implements IAiService {

    // Prompt template shared by the RAG endpoint: ground the answer in the
    // retrieved DOCUMENTS section and require the reply to be in Chinese.
    // Hoisted to a constant instead of being rebuilt on every request.
    private static final String SYSTEM_PROMPT = """
            Use the information from the DOCUMENTS section to provide accurate answers but act as if you knew this information innately.
            If unsure, simply state that you don't know.
            Another thing you need to note is that your reply must be in Chinese!
            DOCUMENTS:
                {documents}
            """;

    @Resource
    private OpenAiChatClient chatClient;
    @Resource
    private PgVectorStore pgVectorStore;

    /**
     * Single (non-streaming) chat completion with the chosen OpenAI model.
     * Fix: added the method-level mapping and {@code @RequestParam} bindings that
     * OllamaController has — without them these endpoints were not reachable over
     * HTTP, only callable as plain Java methods. Paths mirror the Ollama
     * controller's naming (NOTE(review): confirm against the front-end routes).
     */
    @Override
    @RequestMapping(value = "generate", method = RequestMethod.GET)
    public ChatResponse generate(@RequestParam String model, @RequestParam String message) {
        return chatClient.call(new Prompt(message, OpenAiChatOptions.builder().withModel(model).build()));
    }

    /**
     * Streaming chat completion with the chosen OpenAI model.
     */
    @Override
    @RequestMapping(value = "generate_stream", method = RequestMethod.GET)
    public Flux<ChatResponse> generateStream(@RequestParam String model, @RequestParam String message) {
        return chatClient.stream(new Prompt(
                message,
                OpenAiChatOptions.builder()
                        .withModel(model)
                        .build()
        ));
    }

    /**
     * Streaming RAG completion: retrieves the top-5 chunks tagged with
     * {@code ragTag} from the vector store and injects them as a system message.
     */
    @Override
    @RequestMapping(value = "generate_stream_rag", method = RequestMethod.GET)
    public Flux<ChatResponse> generateStreamRag(@RequestParam String model, @RequestParam String ragTag, @RequestParam String message) {
        // Similarity search, restricted to the chosen knowledge base.
        // NOTE(review): ragTag is concatenated straight into the filter expression;
        // a tag containing a quote would break or alter the filter — consider
        // FilterExpressionBuilder. Left unchanged to preserve behavior.
        SearchRequest request = SearchRequest.query(message)
                .withTopK(5)
                .withFilterExpression("knowledge == '" + ragTag + "'");
        List<Document> documents = pgVectorStore.similaritySearch(request);
        String documentCollectors = documents.stream().map(Document::getContent).collect(Collectors.joining());
        // Render the retrieved context into a system message.
        Message ragMessage = new SystemPromptTemplate(SYSTEM_PROMPT).createMessage(Map.of("documents", documentCollectors));
        List<Message> messages = new ArrayList<>();
        messages.add(new UserMessage(message));
        messages.add(ragMessage);
        return chatClient.stream(new Prompt(
                messages,
                OpenAiChatOptions.builder()
                        .withModel(model)
                        .build()
        ));
    }
}