Add: AI large model integration - bring in Ollama alongside Spring AI Alibaba

huangge1199 2025-05-17 10:31:24 +08:00
parent 5b2d1988da
commit 485e985246
5 changed files with 48 additions and 4 deletions

View File

@@ -87,7 +87,12 @@
            <artifactId>spring-ai-alibaba-starter</artifactId>
            <version>1.0.0-M6.1</version>
        </dependency>
        <!-- Spring AI Alibaba: bring in Ollama -->
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
            <version>1.0.0-M6</version>
        </dependency>
    </dependencies>
    <repositories>

View File

@@ -17,4 +17,6 @@ public interface InvokeService {
    JSONObject getMsgByHttp(String question);

    String getMsgBySpringAi(String question);

    String getMsgBySpringAiOllam(String question);
}

View File

@@ -6,6 +6,7 @@ import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpResponse;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatModel;
import com.alibaba.dashscope.aigc.generation.Generation;
import com.alibaba.dashscope.aigc.generation.GenerationParam;
import com.alibaba.dashscope.common.Message;
@@ -15,8 +16,8 @@ import com.alibaba.dashscope.exception.NoApiKeyException;
import com.huangge1199.aiagent.Service.InvokeService;
import jakarta.annotation.Resource;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@@ -33,7 +34,10 @@ public class InvokeServiceImpl implements InvokeService {
    private String baiLianKey;

    @Resource
    private ChatModel dashscopeChatModel;
    private DashScopeChatModel dashscopeChatModel;

    @Resource
    private OllamaChatModel ollamaChatModel;

    @Override
    public JSONObject callWithMessage(String question) throws NoApiKeyException, InputRequiredException {
@@ -106,4 +110,12 @@ public class InvokeServiceImpl implements InvokeService {
                .getOutput();
        return output.getText();
    }

    @Override
    public String getMsgBySpringAiOllam(String question) {
        AssistantMessage output = ollamaChatModel.call(new Prompt(question))
                .getResult()
                .getOutput();
        return output.getText();
    }
}
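
The new getMsgBySpringAiOllam method uses the blocking call(Prompt) API and reads the reply via getResult().getOutput(). For comparison, here is a minimal streaming sketch; it is not part of this commit, the method name is hypothetical, and it assumes the Spring AI M6 contract in which OllamaChatModel also exposes stream(Prompt) returning a Flux<ChatResponse>.

    // Hypothetical streaming counterpart (not in this commit); it would also need a matching
    // declaration in InvokeService and: import reactor.core.publisher.Flux;
    public Flux<String> streamMsgBySpringAiOllam(String question) {
        // stream(Prompt) emits partial ChatResponse chunks as the model generates tokens
        return ollamaChatModel.stream(new Prompt(question))
                .map(response -> response.getResult().getOutput().getText());
    }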

View File

@@ -62,4 +62,15 @@ public class InvokeController {
            return R.fail(e.getMessage());
        }
    }

    @PostMapping("/ollama")
    @Operation(summary = "Spring AI with Ollama")
    public R<String> ollamaInvoke(@RequestBody String question) {
        try {
            String result = invokeService.getMsgBySpringAiOllam(question);
            return R.ok(result);
        } catch (ApiException e) {
            return R.fail(e.getMessage());
        }
    }
}
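
To smoke-test the new endpoint, something like the sketch below can be used. It is not part of this commit and makes two assumptions: the client side is on Spring Framework 6.1+ (RestClient), and the request path is a placeholder, since the controller's class-level mapping and the servlet context-path are not visible in this diff; only server.port: 8123 is known from application.yml. Note that the response body is the serialized R<String> wrapper, not the bare model reply.

// Hypothetical smoke test for the new /ollama endpoint (not part of this commit).
// "/invoke" below is a placeholder; substitute the controller's real base mapping.
import org.springframework.http.MediaType;
import org.springframework.web.client.RestClient;

public class OllamaEndpointSmokeTest {
    public static void main(String[] args) {
        RestClient client = RestClient.create("http://localhost:8123");
        String body = client.post()
                .uri("/invoke/ollama")
                .contentType(MediaType.TEXT_PLAIN)
                .body("Introduce yourself in one sentence")
                .retrieve()
                .body(String.class);
        // Prints the R<String> JSON wrapper returned by the controller
        System.out.println(body);
    }
}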

View File

@@ -3,6 +3,15 @@ spring:
    name: long-ai-agent
  profiles:
    active: dev
  ai:
    dashscope:
      chat:
        options:
          model: qwen-plus
    ollama:
      base-url: http://192.168.188.2:11435
      chat:
        options:
          model: gemma3:1b
server:
  port: 8123
  servlet:
@@ -23,4 +32,9 @@ springdoc:
knife4j:
  enable: true
  setting:
    language: zh_cn
    language: zh_cn

# Logging
logging:
  level:
    org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor: debug
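
One caveat on the logging entry: SimpleLoggerAdvisor only emits request/response logs when it is registered as an advisor on a ChatClient; calls made directly on the injected DashScopeChatModel/OllamaChatModel, as in InvokeServiceImpl above, do not pass through the advisor chain. A minimal wiring sketch, not part of this commit and assuming the Spring AI M6 ChatClient fluent API:

// Hypothetical wiring (not in this commit) so the debug level above has something to log.
// Requires: import org.springframework.ai.chat.client.ChatClient;
//           import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
ChatClient chatClient = ChatClient.builder(ollamaChatModel)
        .defaultAdvisors(new SimpleLoggerAdvisor())
        .build();
// "question" stands for the incoming user prompt
String answer = chatClient.prompt()
        .user(question)
        .call()
        .content();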