// Keep/keep-notes/lib/ai/providers/deepseek.ts
import { createOpenAI } from '@ai-sdk/openai';
import { generateObject, generateText as aiGenerateText, embed, stepCountIs } from 'ai';
import { z } from 'zod';
import { AIProvider, TagSuggestion, TitleSuggestion, ToolUseOptions, ToolCallResult } from '../types';
/**
 * AI provider backed by DeepSeek's OpenAI-compatible API, implemented on top
 * of the Vercel AI SDK (`generateObject` / `generateText` / `embed`).
 */
export class DeepSeekProvider implements AIProvider {
  // NOTE(review): typed `any` because no model type is imported here;
  // consider `LanguageModel` / `EmbeddingModel` from 'ai' — confirm against
  // the installed ai-sdk version before tightening.
  private model: any;
  private embeddingModel: any;

  /**
   * @param apiKey DeepSeek API key.
   * @param modelName Chat model id (defaults to 'deepseek-chat').
   * @param embeddingModelName Embedding model id (defaults to
   *   'deepseek-embedding'). NOTE(review): confirm DeepSeek actually exposes
   *   an embeddings endpoint under this id — it is not obviously documented.
   */
  constructor(apiKey: string, modelName: string = 'deepseek-chat', embeddingModelName: string = 'deepseek-embedding') {
    // DeepSeek speaks the OpenAI wire protocol, so we reuse the OpenAI client
    // with a swapped base URL.
    const deepseek = createOpenAI({
      baseURL: 'https://api.deepseek.com/v1',
      apiKey: apiKey,
    });
    this.model = deepseek.chat(modelName);
    this.embeddingModel = deepseek.embedding(embeddingModelName);
  }

  /**
   * Suggests 1-5 tags for a note's content via structured generation.
   * @returns Tag suggestions with confidence scores; `[]` on any error
   *   (best-effort: failures are logged, never thrown).
   */
  async generateTags(content: string): Promise<TagSuggestion[]> {
    try {
      const { object } = await generateObject({
        model: this.model,
        schema: z.object({
          tags: z.array(z.object({
            tag: z.string().describe('Le nom du tag, court et en minuscules'),
            confidence: z.number().min(0).max(1).describe('Le niveau de confiance entre 0 et 1')
          }))
        }),
        prompt: `Analyse la note suivante et suggère entre 1 et 5 tags pertinents.
Contenu de la note: "${content}"`,
      });
      return object.tags;
    } catch (e) {
      console.error('Erreur génération tags DeepSeek:', e);
      return [];
    }
  }

  /**
   * Computes an embedding vector for `text`.
   * @returns The embedding; `[]` on any error (best-effort, logged).
   */
  async getEmbeddings(text: string): Promise<number[]> {
    try {
      const { embedding } = await embed({
        model: this.embeddingModel,
        value: text,
      });
      return embedding;
    } catch (e) {
      console.error('Erreur embeddings DeepSeek:', e);
      return [];
    }
  }

  /**
   * Generates title suggestions from a caller-supplied prompt via structured
   * generation.
   * @returns Title suggestions with confidence scores; `[]` on any error
   *   (best-effort, logged).
   */
  async generateTitles(prompt: string): Promise<TitleSuggestion[]> {
    try {
      const { object } = await generateObject({
        model: this.model,
        schema: z.object({
          titles: z.array(z.object({
            title: z.string().describe('Le titre suggéré'),
            confidence: z.number().min(0).max(1).describe('Le niveau de confiance entre 0 et 1')
          }))
        }),
        prompt: prompt,
      });
      return object.titles;
    } catch (e) {
      console.error('Erreur génération titres DeepSeek:', e);
      return [];
    }
  }

  /**
   * Plain free-form text generation.
   * @returns The trimmed model output.
   * @throws Rethrows any provider error after logging it.
   */
  async generateText(prompt: string): Promise<string> {
    try {
      const { text } = await aiGenerateText({
        model: this.model,
        prompt: prompt,
      });
      return text.trim();
    } catch (e) {
      console.error('Erreur génération texte DeepSeek:', e);
      throw e;
    }
  }

  /**
   * Multi-turn chat completion.
   * @param messages Conversation history in ai-sdk message format.
   * @param systemPrompt Optional system instruction.
   * @returns `{ text }` with the trimmed assistant reply.
   * @throws Rethrows any provider error after logging it.
   */
  async chat(messages: any[], systemPrompt?: string): Promise<any> {
    try {
      const { text } = await aiGenerateText({
        model: this.model,
        system: systemPrompt,
        messages: messages,
      });
      return { text: text.trim() };
    } catch (e) {
      console.error('Erreur chat DeepSeek:', e);
      throw e;
    }
  }

  // Normalizes ai-sdk tool-call entries to the provider-neutral shape.
  private static mapToolCalls(calls: any[] | undefined): { toolName: string; input: any }[] {
    return calls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) ?? [];
  }

  // Normalizes ai-sdk tool-result entries to the provider-neutral shape.
  private static mapToolResults(results: any[] | undefined): { toolName: string; input: any; output: any }[] {
    return results?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) ?? [];
  }

  /**
   * Agentic generation with tool use, running up to `maxSteps` tool-call
   * rounds (via `stepCountIs`). `messages` takes precedence over `prompt`
   * when both are supplied.
   * @returns Flattened tool calls/results, final text, and the per-step trace.
   * @throws Rethrows any provider error after logging it (consistent with
   *   `chat` / `generateText`).
   */
  async generateWithTools(options: ToolUseOptions): Promise<ToolCallResult> {
    const { tools, maxSteps = 10, systemPrompt, messages, prompt } = options;
    const opts: Record<string, any> = {
      model: this.model,
      tools,
      stopWhen: stepCountIs(maxSteps),
    };
    if (systemPrompt) opts.system = systemPrompt;
    if (messages) opts.messages = messages;
    else if (prompt) opts.prompt = prompt;
    try {
      const result = await aiGenerateText(opts as any);
      return {
        toolCalls: DeepSeekProvider.mapToolCalls(result.toolCalls),
        toolResults: DeepSeekProvider.mapToolResults(result.toolResults),
        text: result.text,
        steps: result.steps?.map((step: any) => ({
          text: step.text,
          toolCalls: DeepSeekProvider.mapToolCalls(step.toolCalls),
          toolResults: DeepSeekProvider.mapToolResults(step.toolResults)
        })) ?? []
      };
    } catch (e) {
      console.error('Erreur génération avec outils DeepSeek:', e);
      throw e;
    }
  }

  /** Exposes the underlying chat model (used by callers needing raw access). */
  getModel() {
    return this.model;
  }
}