- Added multi-provider AI infrastructure (OpenAI/Ollama)
- Implemented real-time tag suggestions with debounced analysis
- Created AI diagnostics and database maintenance tools in Settings
- Added automated garbage collection for orphan labels
- Refined UX with deterministic color hashing and interactive ghost tags (see the sketch after this list)
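For context, here is a minimal sketch of what "debounced analysis" and "deterministic color hashing" could look like; the helper names (debounce, colorForTag, TAG_COLORS) and the palette are hypothetical and not taken from this commit.

// Hypothetical sketch: debounce delays tag analysis until typing pauses,
// and colorForTag maps the same tag text to the same color every time.
function debounce<T extends (...args: any[]) => void>(fn: T, delayMs: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: Parameters<T>) => {
    if (timer !== undefined) clearTimeout(timer);
    timer = setTimeout(() => fn(...args), delayMs);
  };
}

// Fixed palette (assumed); the tag string is hashed into an index.
const TAG_COLORS = ['#ef4444', '#f59e0b', '#10b981', '#3b82f6', '#8b5cf6'];

function colorForTag(tag: string): string {
  let hash = 0;
  for (let i = 0; i < tag.length; i++) {
    hash = (hash * 31 + tag.charCodeAt(i)) >>> 0; // unsigned 32-bit rolling hash
  }
  return TAG_COLORS[hash % TAG_COLORS.length];
}

// Example: re-run analysis at most once per 400 ms pause in typing.
const analyzeNoteDebounced = debounce((content: string) => {
  console.log('analysing note of length', content.length);
}, 400);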
77 lines · 2.4 KiB · TypeScript
import { AIProvider, TagSuggestion } from '../types';

export class OllamaProvider implements AIProvider {
  private baseUrl: string;
  private modelName: string;

  constructor(baseUrl: string = 'http://localhost:11434/api', modelName: string = 'llama3') {
    // Normalise the base URL so endpoint paths can be appended safely.
    this.baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
    this.modelName = modelName;
  }

  async generateTags(content: string): Promise<TagSuggestion[]> {
    try {
      const response = await fetch(`${this.baseUrl}/generate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: this.modelName,
          prompt: `Analyse the following note and extract the key concepts as short tags (1-3 words max).

Rules:
- No linking words (the, a, for, and...).
- Keep compound expressions together (e.g. "next week", "New York").
- Normalise to lowercase except proper nouns.
- Maximum 5 tags.

Respond ONLY as a JSON list of objects: [{"tag": "string", "confidence": number}].

Note content: "${content}"`,
          stream: false,
        }),
      });

      if (!response.ok) throw new Error(`Ollama error: ${response.statusText}`);

      const data = await response.json();
      const text = data.response;

      // Preferred format: a raw JSON array of { tag, confidence } objects.
      const jsonMatch = text.match(/\[\s*\{.*\}\s*\]/s);
      if (jsonMatch) {
        return JSON.parse(jsonMatch[0]);
      }

      // Fallback: support the { "tags": [...] } wrapper format.
      const objectMatch = text.match(/\{\s*"tags"\s*:\s*(\[.*\])\s*\}/s);
      if (objectMatch && objectMatch[1]) {
        return JSON.parse(objectMatch[1]);
      }

      return [];
    } catch (e) {
      console.error('Ollama direct API error:', e);
      return [];
    }
  }

  async getEmbeddings(text: string): Promise<number[]> {
    try {
      const response = await fetch(`${this.baseUrl}/embeddings`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: this.modelName,
          prompt: text,
        }),
      });

      if (!response.ok) throw new Error(`Ollama error: ${response.statusText}`);

      const data = await response.json();
      return data.embedding;
    } catch (e) {
      console.error('Ollama direct embeddings error:', e);
      return [];
    }
  }
}
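A hypothetical usage sketch of the provider above follows; the import path, the running Ollama instance on the default URL, and the locally pulled 'llama3' model are assumptions, while the { tag, confidence } suggestion shape matches the JSON format the prompt requests.

// Assumed import path for this sketch.
import { OllamaProvider } from './OllamaProvider';

const provider = new OllamaProvider('http://localhost:11434/api', 'llama3');

async function demo() {
  const suggestions = await provider.generateTags(
    'Meeting with Alice next week about the New York launch.'
  );
  // Each suggestion is expected to carry a tag string and a confidence number.
  for (const s of suggestions) {
    console.log(`${s.tag} (${s.confidence})`);
  }

  const vector = await provider.getEmbeddings('New York launch plan');
  console.log('embedding dimensions:', vector.length);
}

demo();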