chore: snapshot before performance optimization
This commit is contained in:
68
keep-notes/fix_ollama_provider.py
Normal file
68
keep-notes/fix_ollama_provider.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""Patch script: restore the tail of ``lib/ai/providers/ollama.ts``.

Locates the start of the ``generateTitles`` method in the TypeScript
provider and replaces everything from that point to the end of the file
with known-good implementations of ``generateTitles`` and
``generateText``, plus the closing class brace.
"""
from pathlib import Path

# Target TypeScript source file (relative to the repo root).
OLLAMA_TS = Path("lib/ai/providers/ollama.ts")

# Everything from this marker to the end of the file gets replaced.
_START_MARKER = "async generateTitles(prompt: string): Promise<TitleSuggestion[]> {"

# Known-good tail: generateTitles + generateText + the closing class brace.
# NOTE: ``\\n`` emits a literal backslash-n into the TS template literal
# (TypeScript turns it into a newline at runtime), and the doubled
# backslashes in the regex line emit the TS pattern /\[\s*\{[\s\S]*\}\s*\]/
# exactly as before, without Python invalid-escape SyntaxWarnings.
_REPLACEMENT = """async generateTitles(prompt: string): Promise<TitleSuggestion[]> {
    try {
      const response = await fetch(`${this.baseUrl}/generate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: this.modelName,
          prompt: `${prompt}\\n\\nRéponds UNIQUEMENT sous forme de tableau JSON : [{"title": "string", "confidence": number}]`,
          stream: false,
        }),
      });

      if (!response.ok) throw new Error(`Ollama error: ${response.statusText}`);

      const data = await response.json();
      const text = data.response;

      // Extraire le JSON de la réponse
      const jsonMatch = text.match(/\\[\\s*\\{[\\s\\S]*\\}\\s*\\]/);
      if (jsonMatch) {
        return JSON.parse(jsonMatch[0]);
      }

      return [];
    } catch (e) {
      console.error('Erreur génération titres Ollama:', e);
      return [];
    }
  }

  async generateText(prompt: string): Promise<string> {
    try {
      const response = await fetch(`${this.baseUrl}/generate`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: this.modelName,
          prompt: prompt,
          stream: false,
        }),
      });

      if (!response.ok) throw new Error(`Ollama error: ${response.statusText}`);

      const data = await response.json();
      return data.response.trim();
    } catch (e) {
      console.error('Erreur génération texte Ollama:', e);
      throw e;
    }
  }
}
"""


def restore_generators(content: str) -> str:
    """Return *content* with everything from the generateTitles marker on
    replaced by the known-good tail.

    If the marker is absent, *content* is returned unchanged so a file
    that does not match is never corrupted.
    """
    start = content.find(_START_MARKER)
    if start == -1:
        return content
    return content[:start] + _REPLACEMENT


def main() -> None:
    """Read the provider file, patch it, and write it back only if changed."""
    content = OLLAMA_TS.read_text(encoding="utf-8")
    patched = restore_generators(content)
    # Skip the write when the marker was missing or the tail already matches,
    # so an unchanged file is never rewritten.
    if patched != content:
        OLLAMA_TS.write_text(patched, encoding="utf-8")


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user