feat: AI provider testing page + multi-provider support + UX design spec
- Add AI Provider Testing page (/admin/ai-test) with Tags and Embeddings tests
- Add new AI providers: CustomOpenAI, DeepSeek, OpenRouter
- Add API routes for AI config, models listing, and testing endpoints
- Add UX Design Specification document for Phase 1 MVP AI
- Add PRD Phase 1 MVP AI planning document
- Update admin settings and sidebar navigation
- Fix AI factory for multi-provider support
This commit is contained in:
@@ -1,30 +1,77 @@
|
||||
import { OpenAIProvider } from './providers/openai';
|
||||
import { OllamaProvider } from './providers/ollama';
|
||||
import { CustomOpenAIProvider } from './providers/custom-openai';
|
||||
import { AIProvider } from './types';
|
||||
|
||||
export function getAIProvider(config?: Record<string, string>): AIProvider {
|
||||
const providerType = config?.AI_PROVIDER || process.env.AI_PROVIDER || 'ollama';
|
||||
type ProviderType = 'ollama' | 'openai' | 'custom';
|
||||
|
||||
switch (providerType.toLowerCase()) {
|
||||
function createOllamaProvider(config: Record<string, string>, modelName: string, embeddingModelName: string): OllamaProvider {
|
||||
let baseUrl = config?.OLLAMA_BASE_URL || process.env.OLLAMA_BASE_URL || 'http://localhost:11434';
|
||||
|
||||
// Ensure baseUrl doesn't end with /api, we'll add it in OllamaProvider
|
||||
if (baseUrl.endsWith('/api')) {
|
||||
baseUrl = baseUrl.slice(0, -4); // Remove /api
|
||||
}
|
||||
|
||||
return new OllamaProvider(baseUrl, modelName, embeddingModelName);
|
||||
}
|
||||
|
||||
function createOpenAIProvider(config: Record<string, string>, modelName: string, embeddingModelName: string): OpenAIProvider {
|
||||
const apiKey = config?.OPENAI_API_KEY || process.env.OPENAI_API_KEY || '';
|
||||
|
||||
if (!apiKey) {
|
||||
console.warn('OPENAI_API_KEY non configurée.');
|
||||
}
|
||||
|
||||
return new OpenAIProvider(apiKey, modelName, embeddingModelName);
|
||||
}
|
||||
|
||||
function createCustomOpenAIProvider(config: Record<string, string>, modelName: string, embeddingModelName: string): CustomOpenAIProvider {
|
||||
const apiKey = config?.CUSTOM_OPENAI_API_KEY || process.env.CUSTOM_OPENAI_API_KEY || '';
|
||||
const baseUrl = config?.CUSTOM_OPENAI_BASE_URL || process.env.CUSTOM_OPENAI_BASE_URL || '';
|
||||
|
||||
if (!apiKey) {
|
||||
console.warn('CUSTOM_OPENAI_API_KEY non configurée.');
|
||||
}
|
||||
|
||||
if (!baseUrl) {
|
||||
console.warn('CUSTOM_OPENAI_BASE_URL non configurée.');
|
||||
}
|
||||
|
||||
return new CustomOpenAIProvider(apiKey, baseUrl, modelName, embeddingModelName);
|
||||
}
|
||||
|
||||
function getProviderInstance(providerType: ProviderType, config: Record<string, string>, modelName: string, embeddingModelName: string): AIProvider {
|
||||
switch (providerType) {
|
||||
case 'ollama':
|
||||
let baseUrl = config?.OLLAMA_BASE_URL || process.env.OLLAMA_BASE_URL || 'http://localhost:11434';
|
||||
const model = config?.AI_MODEL_TAGS || process.env.OLLAMA_MODEL || 'granite4:latest';
|
||||
const embedModel = config?.AI_MODEL_EMBEDDING || process.env.OLLAMA_EMBEDDING_MODEL || 'embeddinggemma:latest';
|
||||
|
||||
// Ensure baseUrl doesn't end with /api, we'll add it in OllamaProvider
|
||||
if (baseUrl.endsWith('/api')) {
|
||||
baseUrl = baseUrl.slice(0, -4); // Remove /api
|
||||
}
|
||||
|
||||
return new OllamaProvider(baseUrl, model, embedModel);
|
||||
return createOllamaProvider(config, modelName, embeddingModelName);
|
||||
case 'openai':
|
||||
return createOpenAIProvider(config, modelName, embeddingModelName);
|
||||
case 'custom':
|
||||
return createCustomOpenAIProvider(config, modelName, embeddingModelName);
|
||||
default:
|
||||
const apiKey = config?.OPENAI_API_KEY || process.env.OPENAI_API_KEY || '';
|
||||
const aiModel = config?.AI_MODEL_TAGS || process.env.OPENAI_MODEL || 'gpt-4o-mini';
|
||||
|
||||
if (!apiKey && providerType.toLowerCase() === 'openai') {
|
||||
console.warn('OPENAI_API_KEY non configurée.');
|
||||
}
|
||||
return new OpenAIProvider(apiKey, aiModel);
|
||||
console.warn(`Provider AI inconnu: ${providerType}, utilisation de Ollama par défaut`);
|
||||
return createOllamaProvider(config, modelName, embeddingModelName);
|
||||
}
|
||||
}
|
||||
|
||||
export function getTagsProvider(config?: Record<string, string>): AIProvider {
|
||||
const providerType = (config?.AI_PROVIDER_TAGS || process.env.AI_PROVIDER_TAGS || 'ollama').toLowerCase() as ProviderType;
|
||||
const modelName = config?.AI_MODEL_TAGS || process.env.AI_MODEL_TAGS || 'granite4:latest';
|
||||
const embeddingModelName = config?.AI_MODEL_EMBEDDING || process.env.AI_MODEL_EMBEDDING || 'embeddinggemma:latest';
|
||||
|
||||
return getProviderInstance(providerType, config || {}, modelName, embeddingModelName);
|
||||
}
|
||||
|
||||
export function getEmbeddingsProvider(config?: Record<string, string>): AIProvider {
|
||||
const providerType = (config?.AI_PROVIDER_EMBEDDING || process.env.AI_PROVIDER_EMBEDDING || 'ollama').toLowerCase() as ProviderType;
|
||||
const modelName = config?.AI_MODEL_TAGS || process.env.AI_MODEL_TAGS || 'granite4:latest';
|
||||
const embeddingModelName = config?.AI_MODEL_EMBEDDING || process.env.AI_MODEL_EMBEDDING || 'embeddinggemma:latest';
|
||||
|
||||
return getProviderInstance(providerType, config || {}, modelName, embeddingModelName);
|
||||
}
|
||||
|
||||
// Legacy function for backward compatibility
|
||||
export function getAIProvider(config?: Record<string, string>): AIProvider {
|
||||
return getTagsProvider(config);
|
||||
}
|
||||
|
||||
59
keep-notes/lib/ai/providers/custom-openai.ts
Normal file
59
keep-notes/lib/ai/providers/custom-openai.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, embed } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion } from '../types';
|
||||
|
||||
export class CustomOpenAIProvider implements AIProvider {
|
||||
private model: any;
|
||||
private embeddingModel: any;
|
||||
|
||||
constructor(
|
||||
apiKey: string,
|
||||
baseUrl: string,
|
||||
modelName: string = 'gpt-4o-mini',
|
||||
embeddingModelName: string = 'text-embedding-3-small'
|
||||
) {
|
||||
// Create OpenAI-compatible client with custom base URL
|
||||
const customClient = createOpenAI({
|
||||
baseURL: baseUrl,
|
||||
apiKey: apiKey,
|
||||
});
|
||||
|
||||
this.model = customClient(modelName);
|
||||
this.embeddingModel = customClient.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
async generateTags(content: string): Promise<TagSuggestion[]> {
|
||||
try {
|
||||
const { object } = await generateObject({
|
||||
model: this.model,
|
||||
schema: z.object({
|
||||
tags: z.array(z.object({
|
||||
tag: z.string().describe('Le nom du tag, court et en minuscules'),
|
||||
confidence: z.number().min(0).max(1).describe('Le niveau de confiance entre 0 et 1')
|
||||
}))
|
||||
}),
|
||||
prompt: `Analyse la note suivante et suggère entre 1 et 5 tags pertinents.
|
||||
Contenu de la note: "${content}"`,
|
||||
});
|
||||
|
||||
return object.tags;
|
||||
} catch (e) {
|
||||
console.error('Erreur génération tags Custom OpenAI:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async getEmbeddings(text: string): Promise<number[]> {
|
||||
try {
|
||||
const { embedding } = await embed({
|
||||
model: this.embeddingModel,
|
||||
value: text,
|
||||
});
|
||||
return embedding;
|
||||
} catch (e) {
|
||||
console.error('Erreur embeddings Custom OpenAI:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
54
keep-notes/lib/ai/providers/deepseek.ts
Normal file
54
keep-notes/lib/ai/providers/deepseek.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, embed } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion } from '../types';
|
||||
|
||||
export class DeepSeekProvider implements AIProvider {
|
||||
private model: any;
|
||||
private embeddingModel: any;
|
||||
|
||||
constructor(apiKey: string, modelName: string = 'deepseek-chat', embeddingModelName: string = 'deepseek-embedding') {
|
||||
// Create OpenAI-compatible client for DeepSeek
|
||||
const deepseek = createOpenAI({
|
||||
baseURL: 'https://api.deepseek.com/v1',
|
||||
apiKey: apiKey,
|
||||
});
|
||||
|
||||
this.model = deepseek(modelName);
|
||||
this.embeddingModel = deepseek.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
async generateTags(content: string): Promise<TagSuggestion[]> {
|
||||
try {
|
||||
const { object } = await generateObject({
|
||||
model: this.model,
|
||||
schema: z.object({
|
||||
tags: z.array(z.object({
|
||||
tag: z.string().describe('Le nom du tag, court et en minuscules'),
|
||||
confidence: z.number().min(0).max(1).describe('Le niveau de confiance entre 0 et 1')
|
||||
}))
|
||||
}),
|
||||
prompt: `Analyse la note suivante et suggère entre 1 et 5 tags pertinents.
|
||||
Contenu de la note: "${content}"`,
|
||||
});
|
||||
|
||||
return object.tags;
|
||||
} catch (e) {
|
||||
console.error('Erreur génération tags DeepSeek:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async getEmbeddings(text: string): Promise<number[]> {
|
||||
try {
|
||||
const { embedding } = await embed({
|
||||
model: this.embeddingModel,
|
||||
value: text,
|
||||
});
|
||||
return embedding;
|
||||
} catch (e) {
|
||||
console.error('Erreur embeddings DeepSeek:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,13 +1,20 @@
|
||||
import { openai } from '@ai-sdk/openai';
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, embed } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion } from '../types';
|
||||
|
||||
export class OpenAIProvider implements AIProvider {
|
||||
private model: any;
|
||||
private embeddingModel: any;
|
||||
|
||||
constructor(apiKey: string, modelName: string = 'gpt-4o-mini') {
|
||||
this.model = openai(modelName);
|
||||
constructor(apiKey: string, modelName: string = 'gpt-4o-mini', embeddingModelName: string = 'text-embedding-3-small') {
|
||||
// Create OpenAI client with API key
|
||||
const openaiClient = createOpenAI({
|
||||
apiKey: apiKey,
|
||||
});
|
||||
|
||||
this.model = openaiClient(modelName);
|
||||
this.embeddingModel = openaiClient.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
async generateTags(content: string): Promise<TagSuggestion[]> {
|
||||
@@ -20,7 +27,7 @@ export class OpenAIProvider implements AIProvider {
|
||||
confidence: z.number().min(0).max(1).describe('Le niveau de confiance entre 0 et 1')
|
||||
}))
|
||||
}),
|
||||
prompt: `Analyse la note suivante et suggère entre 1 et 5 tags pertinents.
|
||||
prompt: `Analyse la note suivante et suggère entre 1 et 5 tags pertinents.
|
||||
Contenu de la note: "${content}"`,
|
||||
});
|
||||
|
||||
@@ -34,7 +41,7 @@ export class OpenAIProvider implements AIProvider {
|
||||
async getEmbeddings(text: string): Promise<number[]> {
|
||||
try {
|
||||
const { embedding } = await embed({
|
||||
model: openai.embedding('text-embedding-3-small'),
|
||||
model: this.embeddingModel,
|
||||
value: text,
|
||||
});
|
||||
return embedding;
|
||||
|
||||
54
keep-notes/lib/ai/providers/openrouter.ts
Normal file
54
keep-notes/lib/ai/providers/openrouter.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, embed } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion } from '../types';
|
||||
|
||||
export class OpenRouterProvider implements AIProvider {
|
||||
private model: any;
|
||||
private embeddingModel: any;
|
||||
|
||||
constructor(apiKey: string, modelName: string = 'anthropic/claude-3-haiku', embeddingModelName: string = 'openai/text-embedding-3-small') {
|
||||
// Create OpenAI-compatible client for OpenRouter
|
||||
const openrouter = createOpenAI({
|
||||
baseURL: 'https://openrouter.ai/api/v1',
|
||||
apiKey: apiKey,
|
||||
});
|
||||
|
||||
this.model = openrouter(modelName);
|
||||
this.embeddingModel = openrouter.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
async generateTags(content: string): Promise<TagSuggestion[]> {
|
||||
try {
|
||||
const { object } = await generateObject({
|
||||
model: this.model,
|
||||
schema: z.object({
|
||||
tags: z.array(z.object({
|
||||
tag: z.string().describe('Le nom du tag, court et en minuscules'),
|
||||
confidence: z.number().min(0).max(1).describe('Le niveau de confiance entre 0 et 1')
|
||||
}))
|
||||
}),
|
||||
prompt: `Analyse la note suivante et suggère entre 1 et 5 tags pertinents.
|
||||
Contenu de la note: "${content}"`,
|
||||
});
|
||||
|
||||
return object.tags;
|
||||
} catch (e) {
|
||||
console.error('Erreur génération tags OpenRouter:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async getEmbeddings(text: string): Promise<number[]> {
|
||||
try {
|
||||
const { embedding } = await embed({
|
||||
model: this.embeddingModel,
|
||||
value: text,
|
||||
});
|
||||
return embedding;
|
||||
} catch (e) {
|
||||
console.error('Erreur embeddings OpenRouter:', e);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user