refactor(ai): 实现 DeepSeek 和 Tongyi API 适配器

- 移除 ai.adapter.ts 中的 DeepSeekAdapter 和 TongyiAdapter 类
- 在 deepseek.adapter.ts 和 tongyi.adapter.ts 中实现具体的 API 调用逻辑
- 添加错误处理和响应解析功能
- 优化代码结构,提高可维护性和可扩展性
This commit is contained in:
kingecg 2025-06-10 20:41:51 +08:00
parent 6473395f86
commit b56dc0797e
3 changed files with 155 additions and 42 deletions

View File

@ -4,31 +4,3 @@ export interface AiServiceAdapter {
generateText(model: string, prompt: string, config?: Record<string, any>): Promise<string>;
chat(model: string, messages: Array<{role: string, content: string}>, config?: Record<string, any>): Promise<string>;
}
export class DeepSeekAdapter implements AiServiceAdapter {
  constructor(private readonly config: AiConfig) {}

  /** Stub: echoes the prompt back, tagged with the model name. */
  async generateText(model: string, prompt: string): Promise<string> {
    // Placeholder for the real DeepSeek text-generation call.
    const reply = `Generated text by ${model}: ${prompt}`;
    return reply;
  }

  /** Stub: echoes the serialized conversation history back. */
  async chat(model: string, messages: Array<{role: string, content: string}>): Promise<string> {
    // Placeholder for the real DeepSeek chat call.
    const serialized = JSON.stringify(messages);
    return `Chat response from ${model}: ${serialized}`;
  }
}
export class TongyiAdapter implements AiServiceAdapter {
  constructor(private readonly config: AiConfig) {}

  /** Stub: returns a canned completion containing the prompt. */
  async generateText(model: string, prompt: string): Promise<string> {
    // Placeholder until the real Tongyi (Qwen) API call lands.
    return `Tongyi generated text: ${prompt}`;
  }

  /** Stub: returns a canned reply containing the serialized history. */
  async chat(model: string, messages: Array<{role: string, content: string}>): Promise<string> {
    // Placeholder until the real Tongyi (Qwen) API call lands.
    const history = JSON.stringify(messages);
    return `Tongyi chat response: ${history}`;
  }
}

View File

@ -1,16 +1,84 @@
import { HttpService } from '@nestjs/axios';
import { Injectable } from '@nestjs/common';
import { firstValueFrom } from 'rxjs';
import { AiConfig } from './ai-config.entity';
import { AiServiceAdapter } from './ai.adapter';
@Injectable()
export class DeepSeekAdapter implements AiServiceAdapter {
constructor(private readonly config: any) {}
private readonly httpService: HttpService;
// 实现AiServiceAdapter接口定义的方法
async generateText(model: string, prompt: string, config?: Record<string, any>): Promise<string> {
// 这里添加调用DeepSeek API的具体实现
return `Response from DeepSeek: ${prompt}`;
constructor(private readonly config: AiConfig) {
this.httpService = new HttpService({
baseURL: this.config.apiUrl,
headers: {
'Authorization': `Bearer ${this.config.apiKey}`,
'Content-Type': 'application/json',
},
});
}
/**
*
* @param model
* @param prompt
* @param config
* @returns
*/
async generateText(model: string, prompt: string, config?: Record<string, any>): Promise<string> {
try {
const response = await firstValueFrom(
this.httpService.post('/v1/completions', {
model,
prompt,
max_tokens: config?.max_tokens || 1000,
temperature: config?.temperature || 0.7,
top_p: config?.top_p || 1,
frequency_penalty: config?.frequency_penalty || 0,
presence_penalty: config?.presence_penalty || 0,
})
);
if (response.data && response.data.choices && response.data.choices.length > 0) {
return response.data.choices[0].text;
}
throw new Error('Invalid response from DeepSeek API');
} catch (error:any) {
console.error('DeepSeek API error:', error.response?.data || error.message);
throw new Error(`Failed to generate text: ${error.message}`);
}
}
/**
*
* @param model
* @param messages
* @param config
* @returns
*/
async chat(model: string, messages: Array<{role: string, content: string}>, config?: Record<string, any>): Promise<string> {
// 这里添加调用DeepSeek API的具体实现
return `Chat response from DeepSeek: ${messages[messages.length - 1].content}`;
try {
const response = await firstValueFrom(
this.httpService.post('/v1/chat/completions', {
model,
messages,
max_tokens: config?.max_tokens || 1000,
temperature: config?.temperature || 0.7,
top_p: config?.top_p || 1,
frequency_penalty: config?.frequency_penalty || 0,
presence_penalty: config?.presence_penalty || 0,
})
);
if (response.data && response.data.choices && response.data.choices.length > 0) {
return response.data.choices[0].message.content;
}
throw new Error('Invalid response from DeepSeek API');
} catch (error:any) {
console.error('DeepSeek API error:', error.response?.data || error.message);
throw new Error(`Failed to chat: ${error.message}`);
}
}
}

View File

@ -1,16 +1,89 @@
import { HttpService } from '@nestjs/axios';
import { Injectable } from '@nestjs/common';
import { firstValueFrom } from 'rxjs';
import { AiConfig } from './ai-config.entity';
import { AiServiceAdapter } from './ai.adapter';
@Injectable()
export class TongyiAdapter implements AiServiceAdapter {
constructor(private readonly config: any) {}
private readonly httpService: HttpService;
// 实现AiServiceAdapter接口定义的方法
async generateText(model: string, prompt: string, config?: Record<string, any>): Promise<string> {
// 这里添加调用Tongyi API的具体实现
return `Response from Tongyi: ${prompt}`;
constructor(private readonly config: AiConfig) {
this.httpService = new HttpService({
baseURL: this.config.apiUrl,
headers: {
'Authorization': `Bearer ${this.config.apiKey}`,
'Content-Type': 'application/json',
},
});
}
/**
*
* @param model
* @param prompt
* @param config
* @returns
*/
async generateText(model: string, prompt: string, config?: Record<string, any>): Promise<string> {
try {
const response = await firstValueFrom(
this.httpService.post('/v1/text/generation', {
model,
input: {
prompt,
max_tokens: config?.max_tokens || 1000,
temperature: config?.temperature || 0.7,
top_p: config?.top_p || 1,
stop: config?.stop || [],
}
})
);
if (response.data && response.data.output && response.data.output.text) {
return response.data.output.text;
}
throw new Error('Invalid response from Tongyi API');
} catch (error:any) {
console.error('Tongyi API error:', error.response?.data || error.message);
throw new Error(`Failed to generate text: ${error.message}`);
}
}
/**
*
* @param model
* @param messages
* @param config
* @returns
*/
async chat(model: string, messages: Array<{role: string, content: string}>, config?: Record<string, any>): Promise<string> {
// 这里添加调用Tongyi API的具体实现
return `Chat response from Tongyi: ${messages[messages.length - 1].content}`;
try {
const response = await firstValueFrom(
this.httpService.post('/v1/chat/completions', {
model,
messages: messages.map(msg => ({
role: msg.role,
content: msg.content
})),
parameters: {
max_tokens: config?.max_tokens || 1000,
temperature: config?.temperature || 0.7,
top_p: config?.top_p || 1,
stop: config?.stop || [],
}
})
);
if (response.data && response.data.output && response.data.output.text) {
return response.data.output.text;
}
throw new Error('Invalid response from Tongyi API');
} catch (error:any) {
console.error('Tongyi API error:', error.response?.data || error.message);
throw new Error(`Failed to chat: ${error.message}`);
}
}
}