[Add] AI: integrate the DeepSeek model

YunaiV 2024-07-06 12:18:28 +08:00
parent 2d36ec4858
commit 6c094aaffc
11 changed files with 328 additions and 53 deletions


@@ -2,6 +2,8 @@ package cn.iocoder.yudao.framework.ai.config;
import cn.iocoder.yudao.framework.ai.core.factory.AiClientFactory;
import cn.iocoder.yudao.framework.ai.core.factory.AiClientFactoryImpl;
import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatClient;
import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatOptions;
import cn.iocoder.yudao.framework.ai.core.model.midjourney.api.MidjourneyApi;
import cn.iocoder.yudao.framework.ai.core.model.suno.api.SunoApi;
import cn.iocoder.yudao.framework.ai.core.model.xinghuo.XingHuoChatClient;
@@ -45,6 +47,19 @@ public class YudaoAiAutoConfiguration {
return new XingHuoChatClient(properties.getAppKey(), properties.getSecretKey(), options);
}
@Bean
@ConditionalOnProperty(value = "yudao.ai.deepseek.enable", havingValue = "true")
public DeepSeekChatClient deepSeekChatClient(YudaoAiProperties yudaoAiProperties) {
YudaoAiProperties.DeepSeekProperties properties = yudaoAiProperties.getDeepSeek();
DeepSeekChatOptions options = DeepSeekChatOptions.builder()
.model(properties.getModel())
.temperature(properties.getTemperature())
.maxTokens(properties.getMaxTokens())
.topP(properties.getTopP())
.build();
return new DeepSeekChatClient(properties.getApiKey(), options);
}
@Bean
@ConditionalOnProperty(value = "yudao.ai.midjourney.enable", havingValue = "true")
public MidjourneyApi midjourneyApi(YudaoAiProperties yudaoAiProperties) {

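The new @Bean above is only registered when yudao.ai.deepseek.enable is set to true (per the @ConditionalOnProperty), with the model parameters read from YudaoAiProperties.DeepSeekProperties in the next file. Below is a minimal sketch of how such a bean might be consumed; the DemoChatService class is hypothetical and not part of this commit, only DeepSeekChatClient and the Spring AI Prompt/ChatResponse types are.

import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatClient;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.stereotype.Service;

// Hypothetical consumer, for illustration only; requires yudao.ai.deepseek.enable=true
// so that the auto-configuration above registers the DeepSeekChatClient bean.
@Service
public class DemoChatService {

    private final DeepSeekChatClient deepSeekChatClient;

    public DemoChatService(DeepSeekChatClient deepSeekChatClient) {
        this.deepSeekChatClient = deepSeekChatClient;
    }

    public String ask(String question) {
        // call() is the blocking entry point; stream() returns a Flux<ChatResponse> instead
        ChatResponse response = deepSeekChatClient.call(new Prompt(new UserMessage(question)));
        return response.getResult().getOutput().getContent();
    }
}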

@@ -18,6 +18,11 @@ public class YudaoAiProperties {
*/
private XingHuoProperties xinghuo;
/**
* DeepSeek
*/
private DeepSeekProperties deepSeek;
/**
* Midjourney image generation
*/
@@ -43,6 +48,19 @@ public class YudaoAiProperties {
}
@Data
public static class DeepSeekProperties {
private String enable;
private String apiKey;
private String model;
private Float temperature;
private Integer maxTokens;
private Float topP;
}
@Data
public static class MidjourneyProperties {


@@ -3,7 +3,6 @@ package cn.iocoder.yudao.framework.ai.core.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
// TODO 芋艿: check whether this part still needs adjusting
/**
* AI model platforms
*
@@ -13,15 +12,20 @@ import lombok.Getter;
@AllArgsConstructor
public enum AiPlatformEnum {
// ========== Platforms in China ==========
YI_YAN("YiYan", "文心一言"), // Baidu
QIAN_WEN("QianWen", "千问"), // Alibaba
DEEP_SEEK("DeepSeek", "DeepSeek"), // DeepSeek
XING_HUO("XingHuo", "星火"), // iFlytek
// ========== Overseas platforms ==========
OPENAI("OpenAI", "OpenAI"),
OLLAMA("Ollama", "Ollama"),
YI_YAN("YiYan", "文心一言"), // Baidu
XING_HUO("XingHuo", "星火"), // iFlytek
QIAN_WEN("QianWen", "千问"), // Alibaba
GEMIR("gemir", "gemir"), // Google
STABLE_DIFFUSION("StableDiffusion", "StableDiffusion"), // Stability AI
MIDJOURNEY("Midjourney", "Midjourney"),
MIDJOURNEY("Midjourney", "Midjourney"), // Midjourney
SUNO("Suno", "Suno"), // Suno AI
;


@@ -1,16 +0,0 @@
package cn.iocoder.yudao.framework.ai.core.exception;
// TODO @fan: is there a way to get rid of this?
/**
* AI exception
*
* @author fansili
* @time 2024/4/13 17:05
* @since 1.0
*/
public class AiException extends RuntimeException {
public AiException(String message) {
super(message);
}
}


@@ -1,15 +0,0 @@
package cn.iocoder.yudao.framework.ai.core.exception;
/**
* Chat exception
*
* author: fansili
* time: 2024/3/15 20:45
*/
public class ChatException extends RuntimeException {
public ChatException(String message) {
super(message);
}
}


@@ -9,6 +9,7 @@ import cn.hutool.extra.spring.SpringUtil;
import cn.iocoder.yudao.framework.ai.config.YudaoAiAutoConfiguration;
import cn.iocoder.yudao.framework.ai.config.YudaoAiProperties;
import cn.iocoder.yudao.framework.ai.core.enums.AiPlatformEnum;
import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatClient;
import cn.iocoder.yudao.framework.ai.core.model.midjourney.api.MidjourneyApi;
import cn.iocoder.yudao.framework.ai.core.model.suno.api.SunoApi;
import cn.iocoder.yudao.framework.ai.core.model.xinghuo.XingHuoChatClient;
@@ -64,8 +65,8 @@ public class AiClientFactoryImpl implements AiClientFactory {
return buildXingHuoChatClient(apiKey);
case QIAN_WEN:
return buildQianWenChatClient(apiKey);
// case GEMIR:
// return buildGoogleGemir(apiKey);
case DEEP_SEEK:
return buildDeepSeekChatClient(apiKey);
default:
throw new IllegalArgumentException(StrUtil.format("未知平台({})", platform));
}
@@ -182,6 +183,10 @@ public class AiClientFactoryImpl implements AiClientFactory {
return new XingHuoChatClient(appKey, secretKey);
}
private static DeepSeekChatClient buildDeepSeekChatClient(String apiKey) {
return new DeepSeekChatClient(apiKey);
}
/**
* See {@link TongYiAutoConfiguration#tongYiChatClient(Generation, TongYiChatProperties, TongYiConnectionProperties)}
*/
@@ -195,13 +200,6 @@ public class AiClientFactoryImpl implements AiClientFactory {
return new TongYiAutoConfiguration().tongYiChatClient(generation, chatOptions, connectionProperties);
}
// private static VertexAiGeminiChatClient buildGoogleGemir(String key) {
// List<String> keys = StrUtil.split(key, '|');
// Assert.equals(keys.size(), 2, "VertexAiGeminiChatClient 的密钥需要 (projectId|location) 格式");
// VertexAI vertexApi = new VertexAI(keys.get(0), keys.get(1));
// return new VertexAiGeminiChatClient(vertexApi);
// }
private OpenAiImageModel buildOpenAiImageClient(String openAiToken, String url) {
url = StrUtil.blankToDefault(url, ApiUtils.DEFAULT_BASE_URL);
OpenAiImageApi openAiApi = new OpenAiImageApi(url, openAiToken, RestClient.builder());

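For reference, a standalone sketch contrasting the two construction paths that now exist: buildDeepSeekChatClient(apiKey) above, which relies on the one-argument constructor (deepseek-chat, temperature 0.7), and the fully-specified options used by the auto-configuration. The API key and the option values below are placeholders, not taken from the commit.

import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatClient;
import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatOptions;

public class DeepSeekClientConstructionSketch {

    public static void main(String[] args) {
        // Factory path: same as buildDeepSeekChatClient(apiKey), defaults to
        // DeepSeekChatOptions.MODEL_DEFAULT ("deepseek-chat") with temperature 0.7
        DeepSeekChatClient defaultClient = new DeepSeekChatClient("sk-placeholder-key");

        // Auto-configuration path: explicit options built from the configuration properties
        DeepSeekChatOptions options = DeepSeekChatOptions.builder()
                .model(DeepSeekChatOptions.MODEL_DEFAULT)
                .temperature(0.7F)
                .maxTokens(4096) // placeholder value
                .topP(1.0F)      // placeholder value
                .build();
        DeepSeekChatClient tunedClient = new DeepSeekChatClient("sk-placeholder-key", options);
    }
}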

@@ -0,0 +1,165 @@
package cn.iocoder.yudao.framework.ai.core.model.deepseek;
import cn.hutool.core.lang.Assert;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.metadata.ChatGenerationMetadata;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.model.Generation;
import org.springframework.ai.chat.prompt.ChatOptions;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.model.ModelOptionsUtils;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
import org.springframework.ai.openai.metadata.OpenAiChatResponseMetadata;
import org.springframework.ai.retry.RetryUtils;
import org.springframework.http.ResponseEntity;
import org.springframework.retry.support.RetryTemplate;
import reactor.core.publisher.Flux;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatOptions.MODEL_DEFAULT;
/**
* DeepSeek {@link ChatModel} implementation
*
* @author fansili
*/
@Slf4j
public class DeepSeekChatClient implements ChatModel {
private static final String BASE_URL = "https://api.deepseek.com";
private final DeepSeekChatOptions defaultOptions;
private final RetryTemplate retryTemplate;
/**
* DeepSeek is compatible with OpenAI's HTTP API, so that client is reused here to keep the integration simple.
*
* Note, however, that the compatibility is not complete, so {@link org.springframework.ai.openai.OpenAiChatModel} cannot be used directly; this implementation is modeled on it instead.
*/
private final OpenAiApi openAiApi;
public DeepSeekChatClient(String apiKey) {
this(apiKey, DeepSeekChatOptions.builder().model(MODEL_DEFAULT).temperature(0.7F).build());
}
public DeepSeekChatClient(String apiKey, DeepSeekChatOptions options) {
this(apiKey, options, RetryUtils.DEFAULT_RETRY_TEMPLATE);
}
public DeepSeekChatClient(String apiKey, DeepSeekChatOptions options, RetryTemplate retryTemplate) {
Assert.notEmpty(apiKey, "apiKey 不能为空");
Assert.notNull(options, "options 不能为空");
Assert.notNull(retryTemplate, "retryTemplate 不能为空");
this.openAiApi = new OpenAiApi(BASE_URL, apiKey);
this.defaultOptions = options;
this.retryTemplate = retryTemplate;
}
@Override
public ChatResponse call(Prompt prompt) {
OpenAiApi.ChatCompletionRequest request = createRequest(prompt, false);
return this.retryTemplate.execute(ctx -> {
// 1.1 Send the request
ResponseEntity<OpenAiApi.ChatCompletion> completionEntity = openAiApi.chatCompletionEntity(request);
// 1.2 Validate the result
OpenAiApi.ChatCompletion chatCompletion = completionEntity.getBody();
if (chatCompletion == null) {
log.warn("No chat completion returned for prompt: {}", prompt);
return new ChatResponse(List.of());
}
List<OpenAiApi.ChatCompletion.Choice> choices = chatCompletion.choices();
if (choices == null) {
log.warn("No choices returned for prompt: {}", prompt);
return new ChatResponse(List.of());
}
// 2. Convert to ChatResponse and return
List<Generation> generations = choices.stream().map(choice -> {
Generation generation = new Generation(choice.message().content(), toMap(chatCompletion.id(), choice));
if (choice.finishReason() != null) {
generation.withGenerationMetadata(ChatGenerationMetadata.from(choice.finishReason().name(), null));
}
return generation;
}).toList();
return new ChatResponse(generations,
OpenAiChatResponseMetadata.from(completionEntity.getBody()));
});
}
private Map<String, Object> toMap(String id, OpenAiApi.ChatCompletion.Choice choice) {
Map<String, Object> map = new HashMap<>();
OpenAiApi.ChatCompletionMessage message = choice.message();
if (message.role() != null) {
map.put("role", message.role().name());
}
if (choice.finishReason() != null) {
map.put("finishReason", choice.finishReason().name());
}
map.put("id", id);
return map;
}
@Override
public Flux<ChatResponse> stream(Prompt prompt) {
OpenAiApi.ChatCompletionRequest request = createRequest(prompt, true);
return this.retryTemplate.execute(ctx -> {
// 1. Send the request
Flux<OpenAiApi.ChatCompletionChunk> response = this.openAiApi.chatCompletionStream(request);
return response.map(chatCompletion -> {
String id = chatCompletion.id();
// 2. Convert to ChatResponse and return
List<Generation> generations = chatCompletion.choices().stream().map(choice -> {
String finish = (choice.finishReason() != null ? choice.finishReason().name() : "");
String role = (choice.delta().role() != null ? choice.delta().role().name() : "");
if (choice.finishReason() == OpenAiApi.ChatCompletionFinishReason.STOP) {
// Fallback: DeepSeek may return STOP with an empty role
role = OpenAiApi.ChatCompletionMessage.Role.ASSISTANT.name();
}
Generation generation = new Generation(choice.delta().content(),
Map.of("id", id, "role", role, "finishReason", finish));
if (choice.finishReason() != null) {
generation = generation.withGenerationMetadata(
ChatGenerationMetadata.from(choice.finishReason().name(), null));
}
return generation;
}).toList();
return new ChatResponse(generations);
});
});
}
OpenAiApi.ChatCompletionRequest createRequest(Prompt prompt, boolean stream) {
// 1. Build the ChatCompletionMessage list
List<OpenAiApi.ChatCompletionMessage> chatCompletionMessages = prompt.getInstructions().stream().map(m ->
new OpenAiApi.ChatCompletionMessage(m.getContent(), OpenAiApi.ChatCompletionMessage.Role.valueOf(m.getMessageType().name()))).toList();
OpenAiApi.ChatCompletionRequest request = new OpenAiApi.ChatCompletionRequest(chatCompletionMessages, stream);
// 2.1 Merge the options carried by the prompt
if (prompt.getOptions() != null) {
if (prompt.getOptions() instanceof ChatOptions runtimeOptions) {
OpenAiChatOptions updatedRuntimeOptions = ModelOptionsUtils.copyToTarget(runtimeOptions,
ChatOptions.class, OpenAiChatOptions.class);
request = ModelOptionsUtils.merge(updatedRuntimeOptions, request, OpenAiApi.ChatCompletionRequest.class);
} else {
throw new IllegalArgumentException("Prompt options are not of type ChatOptions: "
+ prompt.getOptions().getClass().getSimpleName());
}
}
// 2.2 Merge the default options
if (this.defaultOptions != null) {
request = ModelOptionsUtils.merge(request, this.defaultOptions, OpenAiApi.ChatCompletionRequest.class);
}
return request;
}
@Override
public ChatOptions getDefaultOptions() {
return DeepSeekChatOptions.fromOptions(defaultOptions);
}
}

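createRequest merges options in two steps: anything attached to the Prompt is copied first (2.1), and the client-wide defaultOptions then fill whatever is still unset (2.2). A sketch of a per-call override, assuming Spring AI's Prompt(List&lt;Message&gt;, ChatOptions) constructor; only fields exposed through the ChatOptions interface (for example temperature and topP) travel through the copyToTarget step.

import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatClient;
import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatOptions;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;

import java.util.ArrayList;
import java.util.List;

public class PerCallOptionsSketch {

    public static void main(String[] args) {
        DeepSeekChatClient client = new DeepSeekChatClient("sk-placeholder-key"); // placeholder key

        List<Message> messages = new ArrayList<>();
        messages.add(new UserMessage("1 + 1 = "));

        // Runtime options win in step 2.1; the client defaults fill the rest in step 2.2.
        DeepSeekChatOptions runtimeOptions = DeepSeekChatOptions.builder()
                .temperature(0.2F)
                .build();
        ChatResponse response = client.call(new Prompt(messages, runtimeOptions));
        System.out.println(response);
    }
}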

@@ -0,0 +1,55 @@
package cn.iocoder.yudao.framework.ai.core.model.deepseek;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.ai.chat.prompt.ChatOptions;
/**
* DeepSeek {@link ChatOptions} implementation
*
* Reference: <a href="https://platform.deepseek.com/api-docs/zh-cn/">Quick Start</a>
*
* @author fansili
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class DeepSeekChatOptions implements ChatOptions {
public static final String MODEL_DEFAULT = "deepseek-chat";
/**
* Model
*/
private String model;
/**
* Temperature
*/
private Float temperature;
/**
* Maximum number of tokens
*/
private Integer maxTokens;
/**
* topP
*/
private Float topP;
@Override
public Integer getTopK() {
return null;
}
public static DeepSeekChatOptions fromOptions(DeepSeekChatOptions fromOptions) {
return DeepSeekChatOptions.builder()
.model(fromOptions.getModel())
.temperature(fromOptions.getTemperature())
.maxTokens(fromOptions.getMaxTokens())
.topP(fromOptions.getTopP())
.build();
}
}

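Two small points about the options class: getTopK() is hard-wired to null because no top-k parameter is mapped for DeepSeek, and fromOptions(...) builds a field-by-field copy, which is what DeepSeekChatClient#getDefaultOptions returns, so callers cannot mutate the client's internal defaults. A quick sketch of that copy behaviour (the values are placeholders):

import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatOptions;

public class DeepSeekOptionsCopySketch {

    public static void main(String[] args) {
        DeepSeekChatOptions original = DeepSeekChatOptions.builder()
                .model(DeepSeekChatOptions.MODEL_DEFAULT)
                .temperature(0.7F)
                .build();

        // fromOptions returns an independent instance
        DeepSeekChatOptions copy = DeepSeekChatOptions.fromOptions(original);
        copy.setTemperature(0.1F); // setter generated by @Data

        System.out.println(original.getTemperature()); // 0.7
        System.out.println(copy.getTemperature());     // 0.1
        System.out.println(copy.getTopK());            // null, top-k is not supported here
    }
}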

@@ -2,6 +2,7 @@ package cn.iocoder.yudao.framework.ai.core.util;
import cn.hutool.core.util.StrUtil;
import cn.iocoder.yudao.framework.ai.core.enums.AiPlatformEnum;
import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatOptions;
import cn.iocoder.yudao.framework.ai.core.model.xinghuo.XingHuoChatOptions;
import com.alibaba.cloud.ai.tongyi.chat.TongYiChatOptions;
import org.springframework.ai.chat.messages.*;
@@ -33,6 +34,8 @@ public class AiUtils {
return XingHuoChatOptions.builder().model(model).temperature(temperatureF).maxTokens(maxTokens).build();
case QIAN_WEN:
return TongYiChatOptions.builder().withModel(model).withTemperature(temperature).withMaxTokens(maxTokens).build();
case DEEP_SEEK:
return DeepSeekChatOptions.builder().model(model).temperature(temperatureF).maxTokens(maxTokens).build();
default:
throw new IllegalArgumentException(StrUtil.format("未知平台({})", platform));
}


@@ -0,0 +1,50 @@
package cn.iocoder.yudao.framework.ai.chat;
import cn.iocoder.yudao.framework.ai.core.model.deepseek.DeepSeekChatClient;
import org.junit.jupiter.api.Test;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.SystemMessage;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import reactor.core.publisher.Flux;
import java.util.ArrayList;
import java.util.List;
/**
* {@link DeepSeekChatClient} integration tests
*
* @author 芋道源码
*/
public class DeepSeekChatTests {
private final DeepSeekChatClient chatModel = new DeepSeekChatClient("sk-e94db327cc7d457d99a8de8810fc6b12");
@Test
public void testCall() {
// Prepare parameters
List<Message> messages = new ArrayList<>();
messages.add(new SystemMessage("你是一个优质的文言文作者,用文言文描述着各城市的人文风景。"));
messages.add(new UserMessage("1 + 1 = "));
// Call
ChatResponse response = chatModel.call(new Prompt(messages));
// Print the result
System.out.println(response);
}
@Test
public void testStream() {
// Prepare parameters
List<Message> messages = new ArrayList<>();
messages.add(new SystemMessage("你是一个优质的文言文作者,用文言文描述着各城市的人文风景。"));
messages.add(new UserMessage("1 + 1 = "));
// Call
Flux<ChatResponse> flux = chatModel.stream(new Prompt(messages));
// Print the result
flux.doOnNext(System.out::println).then().block();
}
}


@@ -19,7 +19,7 @@ import java.util.List;
*/
public class XingHuoChatClientTests {
private final XingHuoChatClient client = new XingHuoChatClient(
private final XingHuoChatClient chatModel = new XingHuoChatClient(
"cb6415c19d6162cda07b47316fcb0416",
"Y2JiYTIxZjA3MDMxMjNjZjQzYzVmNzdh");
@@ -31,9 +31,9 @@ public class XingHuoChatClientTests {
messages.add(new UserMessage("1 + 1 = "));
// Call
ChatResponse response = client.call(new Prompt(messages));
ChatResponse response = chatModel.call(new Prompt(messages));
// Print the result
System.err.println(response);
System.out.println(response);
}
@Test
@@ -44,11 +44,9 @@ public class XingHuoChatClientTests {
messages.add(new UserMessage("1 + 1 = "));
// Call
Flux<ChatResponse> flux = client.stream(new Prompt(messages));
Flux<ChatResponse> flux = chatModel.stream(new Prompt(messages));
// Print the result
List<ChatResponse> responses = flux.collectList().block();
assert responses != null;
responses.forEach(System.err::println);
flux.doOnNext(System.out::println).then().block();
}
}