refactor(ux): consolidate BMAD skills, update design system, and clean up Prisma generated client
This commit is contained in:
167
keep-notes/lib/agent-email-template.ts
Normal file
167
keep-notes/lib/agent-email-template.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
import { promises as fs } from 'fs'
|
||||
import path from 'path'
|
||||
import { randomUUID } from 'crypto'
|
||||
|
||||
export interface EmailAttachment {
|
||||
filename: string
|
||||
content: Buffer
|
||||
cid: string
|
||||
}
|
||||
|
||||
interface AgentEmailParams {
|
||||
agentName: string
|
||||
content: string
|
||||
appUrl: string
|
||||
userName?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a local image file from the public directory.
|
||||
*/
|
||||
async function readLocalImage(relativePath: string): Promise<Buffer | null> {
|
||||
try {
|
||||
const filePath = path.join(process.cwd(), 'public', relativePath)
|
||||
return await fs.readFile(filePath)
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Convert markdown to simple HTML suitable for email clients.
 * Replaces local image references with cid: placeholders for inline attachments.
 * Returns the HTML and a list of attachments to include.
 *
 * Only a small markdown subset is handled here: headings (h1–h3), bold/italic,
 * unordered lists, images, links, horizontal rules, and paragraphs. All styles
 * are inlined on the generated tags. NOTE: the replacement steps below are
 * order-sensitive (e.g. `**bold**` must run before `*italic*`, lists before
 * paragraph wrapping) — do not reorder them.
 */
export async function markdownToEmailHtml(md: string, appUrl: string): Promise<{ html: string; attachments: EmailAttachment[] }> {
  let html = md
  const attachments: EmailAttachment[] = []
  // Strip a single trailing slash so `${baseUrl}${url}` concatenates cleanly.
  const baseUrl = appUrl.replace(/\/$/, '')

  // Remove the execution footer (agent trace). Two variants are stripped:
  // one with a literal `$` after the underscore and one without — presumably
  // two historical footer formats; TODO confirm against the agent executor.
  html = html.replace(/\n---\n\n_\$Agent execution:[\s\S]*$/, '')
  html = html.replace(/\n---\n\n_Agent execution:[\s\S]*$/, '')

  // Horizontal rules: a line of three-or-more dashes.
  html = html.replace(/^---+$/gm, '<hr style="border:none;border-top:1px solid #e5e7eb;margin:20px 0;">')

  // Headings — each pattern requires a space after the hashes, so the three
  // levels cannot shadow one another.
  html = html.replace(/^### (.+)$/gm, '<h3 style="margin:16px 0 8px;font-size:15px;font-weight:600;color:#1f2937;">$1</h3>')
  html = html.replace(/^## (.+)$/gm, '<h2 style="margin:20px 0 10px;font-size:16px;font-weight:700;color:#111827;">$1</h2>')
  html = html.replace(/^# (.+)$/gm, '<h1 style="margin:0 0 16px;font-size:18px;font-weight:700;color:#111827;">$1</h1>')

  // Bold first (consumes `**`), then italic on the remaining single `*`.
  html = html.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>')
  html = html.replace(/\*(.+?)\*/g, '<em style="color:#6b7280;">$1</em>')

  // Unordered list items (`- ` or `* ` bullets); leading indent is preserved.
  html = html.replace(/^(\s*)[-*] (.+)$/gm, '$1<li style="margin:4px 0;padding-left:4px;">$2</li>')

  // Wrap consecutive <li> in <ul> (`\s*` between items spans the newlines).
  html = html.replace(/((?:<li[^>]*>.*?<\/li>\s*)+)/g, (match) => {
    return '<ul style="margin:8px 0;padding-left:20px;list-style-type:disc;">' + match + '</ul>'
  })

  // Images — local images become CID attachments, external stay as-is.
  // Matches are collected first because the loop awaits file reads; the
  // sequential await per image is intentional (one disk read each).
  const imageMatches = [...html.matchAll(/!\[([^\]]*)\]\(([^)]+)\)/g)]
  for (const match of imageMatches) {
    const [fullMatch, alt, url] = match
    let imgTag: string

    if (url.startsWith('/uploads/')) {
      // Local image: read file and attach as CID
      const buffer = await readLocalImage(url)
      if (buffer) {
        // Unique Content-ID per image so the <img src="cid:..."> resolves to
        // the right inline attachment.
        const cid = `img-${randomUUID()}`
        const ext = path.extname(url).toLowerCase() || '.jpg'
        attachments.push({ filename: `image${ext}`, content: buffer, cid })
        imgTag = `<img src="cid:${cid}" alt="${alt}" style="max-width:100%;border-radius:8px;margin:12px 0;" />`
      } else {
        // Fallback to absolute URL if file not found
        imgTag = `<img src="${baseUrl}${url}" alt="${alt}" style="max-width:100%;border-radius:8px;margin:12px 0;" />`
      }
    } else {
      // External image: leave the URL untouched.
      imgTag = `<img src="${url}" alt="${alt}" style="max-width:100%;border-radius:8px;margin:12px 0;" />`
    }
    // String-pattern replace: substitutes the first occurrence only.
    // NOTE(review): `$` sequences inside user-supplied alt text would be
    // interpreted as replacement patterns here — confirm alt is trusted.
    html = html.replace(fullMatch, imgTag)
  }

  // Links — root-relative hrefs are made absolute against the app URL.
  html = html.replace(/\[([^\]]+)\]\(([^)]+)\)/g, (_match, text, url) => {
    const absoluteUrl = url.startsWith('/') ? `${baseUrl}${url}` : url
    return `<a href="${absoluteUrl}" style="color:#3b82f6;text-decoration:none;">${text}</a>`
  })

  // Paragraphs: blank lines split paragraphs, remaining single newlines
  // become <br>, then the whole document is wrapped and empty <p> removed.
  html = html.replace(/\n\n+/g, '</p><p style="margin:0 0 12px;">')
  html = html.replace(/\n/g, '<br>')
  html = '<p style="margin:0 0 12px;">' + html + '</p>'
  html = html.replace(/<p[^>]*>\s*<\/p>/g, '')

  return { html, attachments }
}
|
||||
|
||||
/**
 * Build the complete HTML email sent when an agent finishes a run.
 *
 * @param agentName - Agent display name (header, <title>, body copy).
 * @param content - Agent output in markdown; rendered via markdownToEmailHtml.
 * @param appUrl - App base URL for the CTA button and footer links.
 * @param userName - Optional recipient name for the greeting.
 * @returns The rendered HTML document plus inline CID image attachments.
 */
export async function getAgentEmailTemplate({ agentName, content, appUrl, userName }: AgentEmailParams): Promise<{ html: string; attachments: EmailAttachment[] }> {
  // All user-facing copy in this template is French.
  const greeting = userName ? `Bonjour ${userName},` : 'Bonjour,'
  const { html: htmlContent, attachments } = await markdownToEmailHtml(content, appUrl)

  // Extract a preview (first ~150 chars of plain text for subtitle).
  // Strips headings, bold markers, link syntax, and list bullets, then
  // collapses newlines. NOTE(review): the subtitle below actually truncates
  // at 120 chars, not 150 — align the comment or the code.
  const plainText = content
    .replace(/^#{1,3}\s+/gm, '')
    .replace(/\*\*/g, '')
    .replace(/\[([^\]]+)\]\([^)]+\)/g, '$1')
    .replace(/[-*]\s+/g, '')
    .replace(/\n+/g, ' ')
    .trim()

  // The <style> block is a progressive enhancement (many clients strip it);
  // the markdown-derived body content is inline-styled independently.
  const html = `<!DOCTYPE html>
<html lang="fr">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>${agentName}</title>
<style>
body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Helvetica, Arial, sans-serif; line-height: 1.6; color: #374151; background: #f3f4f6; margin: 0; padding: 0; }
.wrapper { width: 100%; background: #f3f4f6; padding: 32px 16px; }
.container { max-width: 620px; margin: 0 auto; }
.card { background: #ffffff; border-radius: 16px; overflow: hidden; box-shadow: 0 1px 3px rgba(0,0,0,0.08), 0 4px 12px rgba(0,0,0,0.04); }
.card-header { background: linear-gradient(135deg, #1e293b 0%, #334155 100%); padding: 28px 32px; }
.card-header h1 { margin: 0; font-size: 20px; font-weight: 700; color: #ffffff; }
.card-header .subtitle { margin: 6px 0 0; font-size: 13px; color: #94a3b8; }
.card-header .badge { display: inline-block; background: rgba(59,130,246,0.2); color: #93c5fd; font-size: 11px; font-weight: 600; padding: 3px 10px; border-radius: 9999px; margin-top: 10px; letter-spacing: 0.5px; text-transform: uppercase; }
.card-body { padding: 28px 32px; font-size: 14px; color: #374151; }
.card-footer { padding: 20px 32px; border-top: 1px solid #f1f5f9; text-align: center; background: #fafbfc; }
.button { display: inline-block; padding: 12px 28px; background-color: #1e293b; color: #ffffff; text-decoration: none; border-radius: 10px; font-weight: 600; font-size: 14px; letter-spacing: 0.3px; }
.button:hover { background-color: #334155; }
.footer-text { margin-top: 20px; font-size: 12px; color: #9ca3af; text-align: center; }
.footer-text a { color: #64748b; text-decoration: none; }
.footer-text a:hover { text-decoration: underline; }
.date { font-size: 12px; color: #9ca3af; margin-top: 4px; }
</style>
</head>
<body>
<div class="wrapper">
<div class="container">
<div class="card">
<div class="card-header">
<h1>${agentName}</h1>
<div class="subtitle">${plainText.substring(0, 120)}${plainText.length > 120 ? '...' : ''}</div>
<span class="badge">Synthèse automatique</span>
</div>
<div class="card-body">
<p style="margin:0 0 8px;color:#6b7280;font-size:13px;">${greeting}</p>
<p style="margin:0 0 20px;color:#6b7280;font-size:13px;">Votre agent <strong style="color:#1f2937;">${agentName}</strong> a terminé son exécution. Voici les résultats :</p>
<hr style="border:none;border-top:1px solid #f1f5f9;margin:0 0 20px;">
${htmlContent}
</div>
<div class="card-footer">
<a href="${appUrl}" class="button">Ouvrir dans Memento</a>
</div>
</div>
<p class="footer-text">Cet email a été envoyé par votre agent Memento · <a href="${appUrl}/agents">Gérer mes agents</a></p>
</div>
</div>
</body>
</html>`

  return { html, attachments }
}
|
||||
@@ -65,10 +65,18 @@ function getProviderInstance(providerType: ProviderType, config: Record<string,
|
||||
|
||||
export function getTagsProvider(config?: Record<string, string>): AIProvider {
|
||||
// Check database config first, then environment variables
|
||||
const providerType = (config?.AI_PROVIDER_TAGS || process.env.AI_PROVIDER_TAGS);
|
||||
const providerType = (
|
||||
config?.AI_PROVIDER_TAGS ||
|
||||
config?.AI_PROVIDER_EMBEDDING ||
|
||||
config?.AI_PROVIDER ||
|
||||
process.env.AI_PROVIDER_TAGS ||
|
||||
process.env.AI_PROVIDER_EMBEDDING ||
|
||||
process.env.AI_PROVIDER
|
||||
);
|
||||
|
||||
// If no provider is configured, throw a clear error
|
||||
if (!providerType) {
|
||||
console.error('[getTagsProvider] FATAL: No provider configured. Config received:', config);
|
||||
throw new Error(
|
||||
'AI_PROVIDER_TAGS is not configured. Please set it in the admin settings or environment variables. ' +
|
||||
'Options: ollama, openai, custom'
|
||||
@@ -84,10 +92,18 @@ export function getTagsProvider(config?: Record<string, string>): AIProvider {
|
||||
|
||||
export function getEmbeddingsProvider(config?: Record<string, string>): AIProvider {
|
||||
// Check database config first, then environment variables
|
||||
const providerType = (config?.AI_PROVIDER_EMBEDDING || process.env.AI_PROVIDER_EMBEDDING);
|
||||
const providerType = (
|
||||
config?.AI_PROVIDER_EMBEDDING ||
|
||||
config?.AI_PROVIDER_TAGS ||
|
||||
config?.AI_PROVIDER ||
|
||||
process.env.AI_PROVIDER_EMBEDDING ||
|
||||
process.env.AI_PROVIDER_TAGS ||
|
||||
process.env.AI_PROVIDER
|
||||
);
|
||||
|
||||
// If no provider is configured, throw a clear error
|
||||
if (!providerType) {
|
||||
console.error('[getEmbeddingsProvider] FATAL: No provider configured. Config received:', config);
|
||||
throw new Error(
|
||||
'AI_PROVIDER_EMBEDDING is not configured. Please set it in the admin settings or environment variables. ' +
|
||||
'Options: ollama, openai, custom'
|
||||
@@ -104,3 +120,39 @@ export function getEmbeddingsProvider(config?: Record<string, string>): AIProvid
|
||||
/**
 * Generic provider accessor; delegates to the embeddings provider.
 * Kept for callers that do not care which role-specific provider they get.
 */
export function getAIProvider(config?: Record<string, string>): AIProvider {
  return getEmbeddingsProvider(config);
}
|
||||
|
||||
export function getChatProvider(config?: Record<string, string>): AIProvider {
|
||||
// Check database config first, then environment variables
|
||||
// Fallback cascade: chat -> tags -> embeddings
|
||||
const providerType = (
|
||||
config?.AI_PROVIDER_CHAT ||
|
||||
config?.AI_PROVIDER_TAGS ||
|
||||
config?.AI_PROVIDER_EMBEDDING ||
|
||||
config?.AI_PROVIDER ||
|
||||
process.env.AI_PROVIDER_CHAT ||
|
||||
process.env.AI_PROVIDER_TAGS ||
|
||||
process.env.AI_PROVIDER_EMBEDDING ||
|
||||
process.env.AI_PROVIDER
|
||||
);
|
||||
|
||||
// If no provider is configured, throw a clear error
|
||||
if (!providerType) {
|
||||
console.error('[getChatProvider] FATAL: No provider configured. Config received:', config);
|
||||
throw new Error(
|
||||
'AI_PROVIDER_CHAT is not configured. Please set it in the admin settings or environment variables. ' +
|
||||
'Options: ollama, openai, custom'
|
||||
);
|
||||
}
|
||||
|
||||
const provider = providerType.toLowerCase() as ProviderType;
|
||||
const modelName = (
|
||||
config?.AI_MODEL_CHAT ||
|
||||
process.env.AI_MODEL_CHAT ||
|
||||
config?.AI_MODEL_TAGS ||
|
||||
process.env.AI_MODEL_TAGS ||
|
||||
'granite4:latest'
|
||||
);
|
||||
const embeddingModelName = config?.AI_MODEL_EMBEDDING || process.env.AI_MODEL_EMBEDDING || 'embeddinggemma:latest';
|
||||
|
||||
return getProviderInstance(provider, config || {}, modelName, embeddingModelName);
|
||||
}
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, generateText, embed } from 'ai';
|
||||
import { generateObject, generateText as aiGenerateText, embed, stepCountIs } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion } from '../types';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion, ToolUseOptions, ToolCallResult } from '../types';
|
||||
|
||||
export class CustomOpenAIProvider implements AIProvider {
|
||||
private model: any;
|
||||
private embeddingModel: any;
|
||||
private apiKey: string;
|
||||
private baseUrl: string;
|
||||
|
||||
constructor(
|
||||
apiKey: string,
|
||||
@@ -13,13 +15,22 @@ export class CustomOpenAIProvider implements AIProvider {
|
||||
modelName: string = 'gpt-4o-mini',
|
||||
embeddingModelName: string = 'text-embedding-3-small'
|
||||
) {
|
||||
this.apiKey = apiKey;
|
||||
this.baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
|
||||
// Create OpenAI-compatible client with custom base URL
|
||||
// Use .chat() to force /chat/completions endpoint (avoids Responses API)
|
||||
const customClient = createOpenAI({
|
||||
baseURL: baseUrl,
|
||||
apiKey: apiKey,
|
||||
fetch: async (url, options) => {
|
||||
const headers = new Headers(options?.headers);
|
||||
headers.set('HTTP-Referer', 'https://localhost:3000');
|
||||
headers.set('X-Title', 'Memento AI');
|
||||
return fetch(url, { ...options, headers });
|
||||
}
|
||||
});
|
||||
|
||||
this.model = customClient(modelName);
|
||||
this.model = customClient.chat(modelName);
|
||||
this.embeddingModel = customClient.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
@@ -79,7 +90,7 @@ export class CustomOpenAIProvider implements AIProvider {
|
||||
|
||||
async generateText(prompt: string): Promise<string> {
|
||||
try {
|
||||
const { text } = await generateText({
|
||||
const { text } = await aiGenerateText({
|
||||
model: this.model,
|
||||
prompt: prompt,
|
||||
});
|
||||
@@ -90,4 +101,47 @@ export class CustomOpenAIProvider implements AIProvider {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Conversational completion: send the full message history to the model.
   *
   * @param messages - `{ role, content }` messages (assumed oldest-first — TODO confirm with callers).
   * @param systemPrompt - Optional system instruction passed via the SDK `system` field.
   * @returns `{ text }` with the trimmed assistant reply.
   * @throws Re-throws any SDK/network error after logging it.
   */
  async chat(messages: any[], systemPrompt?: string): Promise<any> {
    try {
      const { text } = await aiGenerateText({
        model: this.model,
        system: systemPrompt,
        messages: messages,
      });

      return { text: text.trim() };
    } catch (e) {
      console.error('Erreur chat Custom OpenAI:', e);
      throw e;
    }
  }
|
||||
|
||||
async generateWithTools(options: ToolUseOptions): Promise<ToolCallResult> {
|
||||
const { tools, maxSteps = 10, systemPrompt, messages, prompt } = options
|
||||
const opts: Record<string, any> = {
|
||||
model: this.model,
|
||||
tools,
|
||||
stopWhen: stepCountIs(maxSteps),
|
||||
}
|
||||
if (systemPrompt) opts.system = systemPrompt
|
||||
if (messages) opts.messages = messages
|
||||
else if (prompt) opts.prompt = prompt
|
||||
|
||||
const result = await aiGenerateText(opts as any)
|
||||
return {
|
||||
toolCalls: result.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: result.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || [],
|
||||
text: result.text,
|
||||
steps: result.steps?.map((step: any) => ({
|
||||
text: step.text,
|
||||
toolCalls: step.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: step.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || []
|
||||
})) || []
|
||||
}
|
||||
}
|
||||
|
||||
  // Expose the underlying AI SDK chat model to callers.
  getModel() {
    return this.model;
  }
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, generateText, embed } from 'ai';
|
||||
import { generateObject, generateText as aiGenerateText, embed, stepCountIs } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion } from '../types';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion, ToolUseOptions, ToolCallResult } from '../types';
|
||||
|
||||
export class DeepSeekProvider implements AIProvider {
|
||||
private model: any;
|
||||
@@ -14,7 +14,7 @@ export class DeepSeekProvider implements AIProvider {
|
||||
apiKey: apiKey,
|
||||
});
|
||||
|
||||
this.model = deepseek(modelName);
|
||||
this.model = deepseek.chat(modelName);
|
||||
this.embeddingModel = deepseek.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
@@ -74,7 +74,7 @@ export class DeepSeekProvider implements AIProvider {
|
||||
|
||||
async generateText(prompt: string): Promise<string> {
|
||||
try {
|
||||
const { text } = await generateText({
|
||||
const { text } = await aiGenerateText({
|
||||
model: this.model,
|
||||
prompt: prompt,
|
||||
});
|
||||
@@ -85,4 +85,47 @@ export class DeepSeekProvider implements AIProvider {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Conversational completion: send the full message history to the model.
   *
   * @param messages - `{ role, content }` messages (assumed oldest-first — TODO confirm with callers).
   * @param systemPrompt - Optional system instruction passed via the SDK `system` field.
   * @returns `{ text }` with the trimmed assistant reply.
   * @throws Re-throws any SDK/network error after logging it.
   */
  async chat(messages: any[], systemPrompt?: string): Promise<any> {
    try {
      const { text } = await aiGenerateText({
        model: this.model,
        system: systemPrompt,
        messages: messages,
      });

      return { text: text.trim() };
    } catch (e) {
      console.error('Erreur chat DeepSeek:', e);
      throw e;
    }
  }
|
||||
|
||||
async generateWithTools(options: ToolUseOptions): Promise<ToolCallResult> {
|
||||
const { tools, maxSteps = 10, systemPrompt, messages, prompt } = options
|
||||
const opts: Record<string, any> = {
|
||||
model: this.model,
|
||||
tools,
|
||||
stopWhen: stepCountIs(maxSteps),
|
||||
}
|
||||
if (systemPrompt) opts.system = systemPrompt
|
||||
if (messages) opts.messages = messages
|
||||
else if (prompt) opts.prompt = prompt
|
||||
|
||||
const result = await aiGenerateText(opts as any)
|
||||
return {
|
||||
toolCalls: result.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: result.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || [],
|
||||
text: result.text,
|
||||
steps: result.steps?.map((step: any) => ({
|
||||
text: step.text,
|
||||
toolCalls: step.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: step.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || []
|
||||
})) || []
|
||||
}
|
||||
}
|
||||
|
||||
  // Expose the underlying AI SDK chat model to callers.
  getModel() {
    return this.model;
  }
|
||||
}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion } from '../types';
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateText as aiGenerateText, stepCountIs } from 'ai';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion, ToolUseOptions, ToolCallResult } from '../types';
|
||||
|
||||
export class OllamaProvider implements AIProvider {
|
||||
private baseUrl: string;
|
||||
private modelName: string;
|
||||
private embeddingModelName: string;
|
||||
private model: any;
|
||||
|
||||
constructor(baseUrl: string, modelName: string = 'llama3', embeddingModelName?: string) {
|
||||
if (!baseUrl) {
|
||||
@@ -13,6 +16,15 @@ export class OllamaProvider implements AIProvider {
|
||||
this.baseUrl = baseUrl.endsWith('/api') ? baseUrl : `${baseUrl}/api`;
|
||||
this.modelName = modelName;
|
||||
this.embeddingModelName = embeddingModelName || modelName;
|
||||
|
||||
// Create OpenAI-compatible model for streaming support
|
||||
// Ollama exposes /v1/chat/completions which is compatible with the OpenAI SDK
|
||||
const cleanUrl = this.baseUrl.replace(/\/api$/, '');
|
||||
const ollamaClient = createOpenAI({
|
||||
baseURL: `${cleanUrl}/v1`,
|
||||
apiKey: 'ollama',
|
||||
});
|
||||
this.model = ollamaClient.chat(modelName);
|
||||
}
|
||||
|
||||
async generateTags(content: string, language: string = "en"): Promise<TagSuggestion[]> {
|
||||
@@ -148,4 +160,63 @@ Note content: "${content}"`;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Conversational completion via Ollama's native chat endpoint
   * (`${baseUrl}/chat`, i.e. `/api/chat`) using a raw fetch — not the
   * OpenAI-compatible client held in `this.model`.
   *
   * @param messages - `{ role, content }` messages; copied, original array untouched.
   * @param systemPrompt - Optional system message prepended to the history.
   * @returns `{ text }` with the trimmed assistant reply ('' when absent).
   * @throws Error on non-2xx HTTP status; re-throws network errors after logging.
   */
  async chat(messages: any[], systemPrompt?: string): Promise<any> {
    try {
      // Project only the fields Ollama expects; also yields a fresh array so
      // the unshift below does not mutate the caller's messages.
      const ollamaMessages = messages.map(m => ({
        role: m.role,
        content: m.content
      }));

      if (systemPrompt) {
        ollamaMessages.unshift({ role: 'system', content: systemPrompt });
      }

      // Non-streaming request: one complete JSON response.
      const response = await fetch(`${this.baseUrl}/chat`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          model: this.modelName,
          messages: ollamaMessages,
          stream: false,
        }),
      });

      if (!response.ok) throw new Error(`Ollama error: ${response.statusText}`);

      const data = await response.json();
      // Missing/empty message content degrades to an empty reply.
      return { text: data.message?.content?.trim() || '' };
    } catch (e) {
      console.error('Erreur chat Ollama:', e);
      throw e;
    }
  }
|
||||
|
||||
  // Expose the OpenAI-compatible model built in the constructor
  // (used for streaming support, per the constructor comment).
  getModel() {
    return this.model;
  }
|
||||
|
||||
async generateWithTools(options: ToolUseOptions): Promise<ToolCallResult> {
|
||||
const { tools, maxSteps = 10, systemPrompt, messages, prompt } = options
|
||||
const opts: Record<string, any> = {
|
||||
model: this.model,
|
||||
tools,
|
||||
stopWhen: stepCountIs(maxSteps),
|
||||
}
|
||||
if (systemPrompt) opts.system = systemPrompt
|
||||
if (messages) opts.messages = messages
|
||||
else if (prompt) opts.prompt = prompt
|
||||
|
||||
const result = await aiGenerateText(opts as any)
|
||||
return {
|
||||
toolCalls: result.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: result.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || [],
|
||||
text: result.text,
|
||||
steps: result.steps?.map((step: any) => ({
|
||||
text: step.text,
|
||||
toolCalls: step.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: step.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || []
|
||||
})) || []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, generateText, embed } from 'ai';
|
||||
import { generateObject, generateText as aiGenerateText, embed, stepCountIs } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion } from '../types';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion, ToolUseOptions, ToolCallResult } from '../types';
|
||||
|
||||
export class OpenAIProvider implements AIProvider {
|
||||
private model: any;
|
||||
@@ -9,11 +9,12 @@ export class OpenAIProvider implements AIProvider {
|
||||
|
||||
constructor(apiKey: string, modelName: string = 'gpt-4o-mini', embeddingModelName: string = 'text-embedding-3-small') {
|
||||
// Create OpenAI client with API key
|
||||
// Use .chat() to force /chat/completions endpoint (avoids Responses API)
|
||||
const openaiClient = createOpenAI({
|
||||
apiKey: apiKey,
|
||||
});
|
||||
|
||||
this.model = openaiClient(modelName);
|
||||
this.model = openaiClient.chat(modelName);
|
||||
this.embeddingModel = openaiClient.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
@@ -73,7 +74,7 @@ export class OpenAIProvider implements AIProvider {
|
||||
|
||||
async generateText(prompt: string): Promise<string> {
|
||||
try {
|
||||
const { text } = await generateText({
|
||||
const { text } = await aiGenerateText({
|
||||
model: this.model,
|
||||
prompt: prompt,
|
||||
});
|
||||
@@ -84,4 +85,47 @@ export class OpenAIProvider implements AIProvider {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Conversational completion: send the full message history to the model.
   *
   * @param messages - `{ role, content }` messages (assumed oldest-first — TODO confirm with callers).
   * @param systemPrompt - Optional system instruction passed via the SDK `system` field.
   * @returns `{ text }` with the trimmed assistant reply.
   * @throws Re-throws any SDK/network error after logging it.
   */
  async chat(messages: any[], systemPrompt?: string): Promise<any> {
    try {
      const { text } = await aiGenerateText({
        model: this.model,
        system: systemPrompt,
        messages: messages,
      });

      return { text: text.trim() };
    } catch (e) {
      console.error('Erreur chat OpenAI:', e);
      throw e;
    }
  }
|
||||
|
||||
async generateWithTools(options: ToolUseOptions): Promise<ToolCallResult> {
|
||||
const { tools, maxSteps = 10, systemPrompt, messages, prompt } = options
|
||||
const opts: Record<string, any> = {
|
||||
model: this.model,
|
||||
tools,
|
||||
stopWhen: stepCountIs(maxSteps),
|
||||
}
|
||||
if (systemPrompt) opts.system = systemPrompt
|
||||
if (messages) opts.messages = messages
|
||||
else if (prompt) opts.prompt = prompt
|
||||
|
||||
const result = await aiGenerateText(opts as any)
|
||||
return {
|
||||
toolCalls: result.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: result.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || [],
|
||||
text: result.text,
|
||||
steps: result.steps?.map((step: any) => ({
|
||||
text: step.text,
|
||||
toolCalls: step.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: step.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || []
|
||||
})) || []
|
||||
}
|
||||
}
|
||||
|
||||
  // Expose the underlying AI SDK chat model to callers.
  getModel() {
    return this.model;
  }
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createOpenAI } from '@ai-sdk/openai';
|
||||
import { generateObject, generateText, embed } from 'ai';
|
||||
import { generateObject, generateText as aiGenerateText, embed, stepCountIs } from 'ai';
|
||||
import { z } from 'zod';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion } from '../types';
|
||||
import { AIProvider, TagSuggestion, TitleSuggestion, ToolUseOptions, ToolCallResult } from '../types';
|
||||
|
||||
export class OpenRouterProvider implements AIProvider {
|
||||
private model: any;
|
||||
@@ -14,7 +14,7 @@ export class OpenRouterProvider implements AIProvider {
|
||||
apiKey: apiKey,
|
||||
});
|
||||
|
||||
this.model = openrouter(modelName);
|
||||
this.model = openrouter.chat(modelName);
|
||||
this.embeddingModel = openrouter.embedding(embeddingModelName);
|
||||
}
|
||||
|
||||
@@ -74,7 +74,7 @@ export class OpenRouterProvider implements AIProvider {
|
||||
|
||||
async generateText(prompt: string): Promise<string> {
|
||||
try {
|
||||
const { text } = await generateText({
|
||||
const { text } = await aiGenerateText({
|
||||
model: this.model,
|
||||
prompt: prompt,
|
||||
});
|
||||
@@ -85,4 +85,47 @@ export class OpenRouterProvider implements AIProvider {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Conversational completion: send the full message history to the model.
   *
   * @param messages - `{ role, content }` messages (assumed oldest-first — TODO confirm with callers).
   * @param systemPrompt - Optional system instruction passed via the SDK `system` field.
   * @returns `{ text }` with the trimmed assistant reply.
   * @throws Re-throws any SDK/network error after logging it.
   */
  async chat(messages: any[], systemPrompt?: string): Promise<any> {
    try {
      const { text } = await aiGenerateText({
        model: this.model,
        system: systemPrompt,
        messages: messages,
      });

      return { text: text.trim() };
    } catch (e) {
      console.error('Erreur chat OpenRouter:', e);
      throw e;
    }
  }
|
||||
|
||||
async generateWithTools(options: ToolUseOptions): Promise<ToolCallResult> {
|
||||
const { tools, maxSteps = 10, systemPrompt, messages, prompt } = options
|
||||
const opts: Record<string, any> = {
|
||||
model: this.model,
|
||||
tools,
|
||||
stopWhen: stepCountIs(maxSteps),
|
||||
}
|
||||
if (systemPrompt) opts.system = systemPrompt
|
||||
if (messages) opts.messages = messages
|
||||
else if (prompt) opts.prompt = prompt
|
||||
|
||||
const result = await aiGenerateText(opts as any)
|
||||
return {
|
||||
toolCalls: result.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: result.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || [],
|
||||
text: result.text,
|
||||
steps: result.steps?.map((step: any) => ({
|
||||
text: step.text,
|
||||
toolCalls: step.toolCalls?.map((tc: any) => ({ toolName: tc.toolName, input: tc.input })) || [],
|
||||
toolResults: step.toolResults?.map((tr: any) => ({ toolName: tr.toolName, input: tr.input, output: tr.output })) || []
|
||||
})) || []
|
||||
}
|
||||
}
|
||||
|
||||
  // Expose the underlying AI SDK chat model to callers.
  getModel() {
    return this.model;
  }
|
||||
}
|
||||
|
||||
1106
keep-notes/lib/ai/services/agent-executor.service.ts
Normal file
1106
keep-notes/lib/ai/services/agent-executor.service.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -62,6 +62,7 @@ export class AutoLabelCreationService {
|
||||
where: {
|
||||
notebookId,
|
||||
userId,
|
||||
trashedAt: null,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
@@ -471,7 +472,7 @@ Deine Antwort (nur JSON):
|
||||
await prisma.note.update({
|
||||
where: { id: noteId },
|
||||
data: {
|
||||
labels: names as any,
|
||||
labels: JSON.stringify(names),
|
||||
labelRelations: {
|
||||
connect: { id: label.id },
|
||||
},
|
||||
|
||||
@@ -45,6 +45,7 @@ export class BatchOrganizationService {
|
||||
where: {
|
||||
userId,
|
||||
notebookId: null,
|
||||
trashedAt: null,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
|
||||
141
keep-notes/lib/ai/services/chat.service.ts
Normal file
141
keep-notes/lib/ai/services/chat.service.ts
Normal file
@@ -0,0 +1,141 @@
|
||||
/**
|
||||
* Chat Service
|
||||
* Handles conversational AI with context retrieval (RAG)
|
||||
*/
|
||||
|
||||
import { semanticSearchService } from './semantic-search.service'
|
||||
import { getChatProvider } from '../factory'
|
||||
import { getSystemConfig } from '@/lib/config'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { auth } from '@/auth'
|
||||
import { loadTranslations, getTranslationValue, SupportedLanguage } from '@/lib/i18n'
|
||||
|
||||
// Default untitled text for fallback
|
||||
const DEFAULT_UNTITLED = 'Untitled'
|
||||
|
||||
export interface ChatMessage {
|
||||
role: 'user' | 'assistant' | 'system'
|
||||
content: string
|
||||
}
|
||||
|
||||
export interface ChatResponse {
|
||||
message: string
|
||||
conversationId?: string
|
||||
suggestedNotes?: Array<{ id: string; title: string }>
|
||||
}
|
||||
|
||||
export class ChatService {
|
||||
  /**
   * Main chat entry point with context retrieval (RAG).
   *
   * Authenticates the caller, loads (or creates) the conversation, retrieves
   * semantically similar notes as context, asks the chat provider for a reply,
   * and persists both the user message and the assistant reply.
   *
   * @param message - The user's new message.
   * @param conversationId - Existing conversation to continue; a new one is
   *   created when absent or not found.
   * @param notebookId - Optional notebook scope for retrieval and for new
   *   conversations.
   * @param language - UI language used for translated fallback strings.
   * @returns Assistant reply, conversation id, and the notes used as context.
   * @throws Error('Unauthorized') when there is no authenticated session.
   */
  async chat(
    message: string,
    conversationId?: string,
    notebookId?: string,
    language: SupportedLanguage = 'en'
  ): Promise<ChatResponse> {
    const session = await auth()
    if (!session?.user?.id) {
      throw new Error('Unauthorized')
    }
    const userId = session.user.id

    // Load translations for the requested language
    const translations = await loadTranslations(language)
    const untitledText = getTranslationValue(translations, 'notes.untitled') || DEFAULT_UNTITLED
    const noNotesFoundText = getTranslationValue(translations, 'chat.noNotesFoundForContext') ||
      'No relevant notes found for this question. Answer with your general knowledge.'

    // 1. Manage Conversation
    // NOTE(review): the lookup is by id only — it does not check the
    // conversation belongs to `userId`; confirm ownership is enforced upstream.
    // NOTE(review): `orderBy: asc` + `take: 10` loads the *oldest* 10 messages
    // as history — probably the most recent 10 were intended; verify.
    let conversation: any
    if (conversationId) {
      conversation = await prisma.conversation.findUnique({
        where: { id: conversationId },
        include: { messages: { orderBy: { createdAt: 'asc' }, take: 10 } }
      })
    }

    if (!conversation) {
      // First 50 chars of the message become the title ('...' is always appended).
      conversation = await prisma.conversation.create({
        data: {
          userId,
          notebookId,
          title: message.substring(0, 50) + '...'
        },
        include: { messages: true }
      })
    }

    // 2. Retrieval (RAG)
    // We search for relevant notes based on the current message or notebook context.
    // Lower threshold for notebook-specific searches to ensure we find relevant content.
    const searchResults = await semanticSearchService.search(message, {
      notebookId,
      limit: 10,
      threshold: notebookId ? 0.3 : 0.5
    })

    // Each retrieved note is serialized into the prompt context block.
    const contextNotes = searchResults.map(r =>
      `NOTE [${r.title || untitledText}]: ${r.content}`
    ).join('\n\n---\n\n')

    // 3. System Prompt Synthesis (French persona for the Memento assistant).
    const systemPrompt = `Tu es l'Assistant IA de Memento. Tu accompagnes l'utilisateur dans sa réflexion.
Tes réponses doivent être concises, premium et utiles.
${contextNotes.length > 0 ? `Voici des extraits de notes de l'utilisateur qui pourraient t'aider à répondre :\n\n${contextNotes}\n\nUtilise ces informations si elles sont pertinentes, mais ne les cite pas mot pour mot sauf si demandé.` : noNotesFoundText}
Si l'utilisateur pose une question sur un carnet spécifique, reste focalisé sur ce contexte.`

    // 4. Call AI Provider with prior history plus the new user message.
    const history = (conversation.messages || []).map((m: any) => ({
      role: m.role,
      content: m.content
    }))

    const currentMessages = [...history, { role: 'user', content: message }]

    const config = await getSystemConfig()
    const provider = getChatProvider(config)
    const aiResponse = await provider.chat(currentMessages, systemPrompt)

    // 5. Save Messages to DB (user message and assistant reply in one write).
    await prisma.chatMessage.createMany({
      data: [
        { conversationId: conversation.id, role: 'user', content: message },
        { conversationId: conversation.id, role: 'assistant', content: aiResponse.text }
      ]
    })

    return {
      message: aiResponse.text,
      conversationId: conversation.id,
      suggestedNotes: searchResults.map(r => ({ id: r.noteId, title: r.title || untitledText }))
    }
  }
|
||||
|
||||
/**
|
||||
* Get conversation history
|
||||
*/
|
||||
async getHistory(conversationId: string) {
|
||||
return prisma.conversation.findUnique({
|
||||
where: { id: conversationId },
|
||||
include: {
|
||||
messages: {
|
||||
orderBy: { createdAt: 'asc' }
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* List user conversations
|
||||
*/
|
||||
async listConversations(userId: string) {
|
||||
return prisma.conversation.findMany({
|
||||
where: { userId },
|
||||
orderBy: { updatedAt: 'desc' },
|
||||
take: 20
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export const chatService = new ChatService()
|
||||
@@ -68,3 +68,24 @@ export {
|
||||
notebookSummaryService,
|
||||
type NotebookSummary
|
||||
} from './notebook-summary.service'
|
||||
|
||||
// Chat
|
||||
export {
|
||||
ChatService,
|
||||
chatService,
|
||||
type ChatResponse
|
||||
} from './chat.service'
|
||||
|
||||
// Scrape
|
||||
export {
|
||||
ScrapeService,
|
||||
scrapeService,
|
||||
type ScrapedContent
|
||||
} from './scrape.service'
|
||||
|
||||
// Tool Registry
|
||||
export {
|
||||
toolRegistry,
|
||||
type ToolContext,
|
||||
type RegisteredTool
|
||||
} from '../tools'
|
||||
|
||||
@@ -61,6 +61,7 @@ export class MemoryEchoService {
|
||||
where: {
|
||||
userId,
|
||||
isArchived: false,
|
||||
trashedAt: null,
|
||||
noteEmbedding: { isNot: null } // Only notes with embeddings
|
||||
},
|
||||
select: {
|
||||
@@ -284,6 +285,11 @@ Explain in one brief sentence (max 15 words) why these notes are connected. Focu
|
||||
)
|
||||
|
||||
// Store insight in database
|
||||
// In demo mode, add milliseconds offset to avoid @@unique([userId, insightDate]) collision
|
||||
const insightDateValue = demoMode
|
||||
? new Date(Date.now() + Math.floor(Math.random() * 1000))
|
||||
: new Date()
|
||||
|
||||
const insight = await prisma.memoryEchoInsight.create({
|
||||
data: {
|
||||
userId,
|
||||
@@ -291,7 +297,7 @@ Explain in one brief sentence (max 15 words) why these notes are connected. Focu
|
||||
note2Id: newConnection.note2.id,
|
||||
similarityScore: newConnection.similarityScore,
|
||||
insight: insightText,
|
||||
insightDate: new Date(),
|
||||
insightDate: insightDateValue,
|
||||
viewed: false
|
||||
},
|
||||
include: {
|
||||
@@ -410,6 +416,7 @@ Explain in one brief sentence (max 15 words) why these notes are connected. Focu
|
||||
userId,
|
||||
id: { not: noteId },
|
||||
isArchived: false,
|
||||
trashedAt: null,
|
||||
noteEmbedding: { isNot: null }
|
||||
},
|
||||
select: {
|
||||
|
||||
@@ -55,6 +55,7 @@ export class NotebookSummaryService {
|
||||
where: {
|
||||
notebookId,
|
||||
userId,
|
||||
trashedAt: null,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
|
||||
92
keep-notes/lib/ai/services/rss.service.ts
Normal file
92
keep-notes/lib/ai/services/rss.service.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
|
||||
* RSS/Atom Feed Service
|
||||
* Parses RSS and Atom feeds and returns structured article entries.
|
||||
* Used by the scraper pipeline to get individual article URLs from feeds.
|
||||
*/
|
||||
|
||||
import Parser from 'rss-parser'
|
||||
|
||||
export interface FeedArticle {
|
||||
title: string
|
||||
link: string
|
||||
pubDate?: string
|
||||
contentSnippet?: string
|
||||
content?: string
|
||||
creator?: string
|
||||
}
|
||||
|
||||
export interface ParsedFeed {
|
||||
title: string
|
||||
description?: string
|
||||
link?: string
|
||||
articles: FeedArticle[]
|
||||
}
|
||||
|
||||
const parser = new Parser({
|
||||
timeout: 15000,
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Accept': 'application/rss+xml, application/xml, text/xml, application/atom+xml, text/html;q=0.9',
|
||||
},
|
||||
})
|
||||
|
||||
const MAX_ARTICLES_PER_FEED = 8
|
||||
|
||||
export class RssService {
|
||||
/**
|
||||
* Detect if a URL looks like an RSS/Atom feed
|
||||
*/
|
||||
isFeedUrl(url: string): boolean {
|
||||
const feedPatterns = [
|
||||
'/feed', '/rss', '/atom', '/feed/', '/rss/',
|
||||
'.xml', '.rss', '.atom',
|
||||
'/feed/json',
|
||||
]
|
||||
const lower = url.toLowerCase()
|
||||
return feedPatterns.some(p => lower.includes(p))
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to parse a URL as an RSS/Atom feed.
|
||||
* Returns null if the URL is not a valid feed.
|
||||
*/
|
||||
async parseFeed(feedUrl: string): Promise<ParsedFeed | null> {
|
||||
try {
|
||||
const result = await parser.parseURL(feedUrl)
|
||||
return {
|
||||
title: result.title || feedUrl,
|
||||
description: result.description,
|
||||
link: result.link,
|
||||
articles: (result.items || [])
|
||||
.slice(0, MAX_ARTICLES_PER_FEED)
|
||||
.map(item => ({
|
||||
title: item.title || 'Sans titre',
|
||||
link: item.link || '',
|
||||
pubDate: item.pubDate || item.isoDate,
|
||||
contentSnippet: (item.contentSnippet || '').substring(0, 500),
|
||||
content: item['content:encoded'] || item.content || '',
|
||||
creator: item.creator || item.dc?.creator,
|
||||
}))
|
||||
.filter(a => a.link), // Only keep entries with a link
|
||||
}
|
||||
} catch {
|
||||
// Not a valid feed or fetch failed
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch an RSS feed and return only the article URLs for scraping.
|
||||
* Useful when you want to scrape articles individually.
|
||||
*/
|
||||
async getArticleUrls(feedUrl: string): Promise<{ feedTitle: string; urls: string[] }> {
|
||||
const feed = await this.parseFeed(feedUrl)
|
||||
if (!feed) return { feedTitle: '', urls: [] }
|
||||
return {
|
||||
feedTitle: feed.title,
|
||||
urls: feed.articles.map(a => a.link),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const rssService = new RssService()
|
||||
68
keep-notes/lib/ai/services/scrape.service.ts
Normal file
68
keep-notes/lib/ai/services/scrape.service.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
/**
|
||||
* Scrape Service
|
||||
* Advanced content extraction using Readability and jsdom
|
||||
*/
|
||||
|
||||
import { JSDOM } from 'jsdom'
|
||||
import { Readability } from '@mozilla/readability'
|
||||
|
||||
export interface ScrapedContent {
|
||||
title: string
|
||||
content: string // Markdown or clean text
|
||||
textContent: string
|
||||
excerpt: string
|
||||
byline: string
|
||||
siteName: string
|
||||
url: string
|
||||
}
|
||||
|
||||
export class ScrapeService {
|
||||
async scrapeUrl(url: string): Promise<ScrapedContent | null> {
|
||||
try {
|
||||
// Add protocol if missing
|
||||
let targetUrl = url
|
||||
if (!url.startsWith('http://') && !url.startsWith('https://')) {
|
||||
targetUrl = 'https://' + url
|
||||
}
|
||||
|
||||
console.log(`[ScrapeService] Fetching ${targetUrl}...`)
|
||||
|
||||
const response = await fetch(targetUrl, {
|
||||
headers: {
|
||||
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
|
||||
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
|
||||
},
|
||||
next: { revalidate: 3600 }
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`)
|
||||
}
|
||||
|
||||
const html = await response.text()
|
||||
const dom = new JSDOM(html, { url: targetUrl })
|
||||
|
||||
const reader = new Readability(dom.window.document)
|
||||
const article = reader.parse()
|
||||
|
||||
if (!article) {
|
||||
return null
|
||||
}
|
||||
|
||||
return {
|
||||
title: article.title,
|
||||
content: article.content, // HTML fragment from readability
|
||||
textContent: article.textContent, // Clean text
|
||||
excerpt: article.excerpt,
|
||||
byline: article.byline,
|
||||
siteName: article.siteName,
|
||||
url: targetUrl
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[ScrapeService] Error scraping ${url}:`, error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const scrapeService = new ScrapeService()
|
||||
@@ -22,6 +22,7 @@ export interface SearchOptions {
|
||||
threshold?: number // Minimum similarity score (0-1)
|
||||
includeExactMatches?: boolean
|
||||
notebookId?: string // NEW: Filter by notebook for contextual search (IA5)
|
||||
defaultTitle?: string // Optional default title for untitled notes (i18n)
|
||||
}
|
||||
|
||||
export class SemanticSearchService {
|
||||
@@ -40,7 +41,8 @@ export class SemanticSearchService {
|
||||
limit = this.DEFAULT_LIMIT,
|
||||
threshold = this.DEFAULT_THRESHOLD,
|
||||
includeExactMatches = true,
|
||||
notebookId // NEW: Contextual search within notebook (IA5)
|
||||
notebookId, // NEW: Contextual search within notebook (IA5)
|
||||
defaultTitle = 'Untitled' // Default title for i18n
|
||||
} = options
|
||||
|
||||
if (!query || query.trim().length < 2) {
|
||||
@@ -63,14 +65,15 @@ export class SemanticSearchService {
|
||||
semanticResults
|
||||
)
|
||||
|
||||
// 4. Sort by final score and limit
|
||||
return fusedResults
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, limit)
|
||||
.map(result => ({
|
||||
...result,
|
||||
matchType: result.score > 0.8 ? 'exact' : 'related'
|
||||
}))
|
||||
// 4. Sort by final score and limit
|
||||
return fusedResults
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, limit)
|
||||
.map(result => ({
|
||||
...result,
|
||||
title: result.title || defaultTitle,
|
||||
matchType: result.score > 0.8 ? 'exact' : 'related'
|
||||
}))
|
||||
} catch (error) {
|
||||
console.error('Error in hybrid search:', error)
|
||||
// Fallback to keyword-only search
|
||||
@@ -79,7 +82,7 @@ export class SemanticSearchService {
|
||||
// Fetch note details for keyword results
|
||||
const noteIds = keywordResults.slice(0, limit).map(r => r.noteId)
|
||||
const notes = await prisma.note.findMany({
|
||||
where: { id: { in: noteIds } },
|
||||
where: { id: { in: noteIds }, trashedAt: null },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
@@ -90,7 +93,7 @@ export class SemanticSearchService {
|
||||
|
||||
return notes.map(note => ({
|
||||
noteId: note.id,
|
||||
title: note.title,
|
||||
title: note.title || defaultTitle,
|
||||
content: note.content,
|
||||
score: 1.0, // Default score for keyword-only results
|
||||
matchType: 'related' as const,
|
||||
@@ -107,17 +110,27 @@ export class SemanticSearchService {
|
||||
userId: string | null,
|
||||
notebookId?: string // NEW: Filter by notebook (IA5)
|
||||
): Promise<Array<{ noteId: string; rank: number }>> {
|
||||
// Build query for case-insensitive search
|
||||
const searchPattern = `%${query}%`
|
||||
// Extract keywords (words with > 3 characters) to avoid entire sentence matching failing
|
||||
const stopWords = new Set(['comment', 'pourquoi', 'lequel', 'laquelle', 'avec', 'pour', 'dans', 'sur', 'est-ce']);
|
||||
const keywords = query.toLowerCase()
|
||||
.split(/[^a-z0-9àáâäçéèêëíìîïñóòôöúùûü]/i)
|
||||
.filter(w => w.length > 3 && !stopWords.has(w));
|
||||
|
||||
// If no good keywords found, fallback to the original query but it'll likely fail
|
||||
const searchTerms = keywords.length > 0 ? keywords : [query];
|
||||
|
||||
// Build Prisma OR clauses for each keyword
|
||||
const searchConditions = searchTerms.flatMap(term => [
|
||||
{ title: { contains: term } },
|
||||
{ content: { contains: term } }
|
||||
]);
|
||||
|
||||
const notes = await prisma.note.findMany({
|
||||
where: {
|
||||
...(userId ? { userId } : {}),
|
||||
...(notebookId !== undefined ? { notebookId } : {}), // NEW: Notebook filter
|
||||
OR: [
|
||||
{ title: { contains: query } },
|
||||
{ content: { contains: query } }
|
||||
]
|
||||
trashedAt: null,
|
||||
OR: searchConditions
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
@@ -178,6 +191,7 @@ export class SemanticSearchService {
|
||||
where: {
|
||||
...(userId ? { userId } : {}),
|
||||
...(notebookId !== undefined ? { notebookId } : {}),
|
||||
trashedAt: null,
|
||||
noteEmbedding: { isNot: null }
|
||||
},
|
||||
select: {
|
||||
@@ -245,7 +259,7 @@ export class SemanticSearchService {
|
||||
// Fetch note details
|
||||
const noteIds = Array.from(scores.keys())
|
||||
const notes = await prisma.note.findMany({
|
||||
where: { id: { in: noteIds } },
|
||||
where: { id: { in: noteIds }, trashedAt: null },
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
@@ -313,6 +327,46 @@ export class SemanticSearchService {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Search as a specific user (no auth() call).
|
||||
* Used by agent tools that run server-side without HTTP session.
|
||||
*/
|
||||
async searchAsUser(
|
||||
userId: string,
|
||||
query: string,
|
||||
options: SearchOptions = {}
|
||||
): Promise<SearchResult[]> {
|
||||
const {
|
||||
limit = this.DEFAULT_LIMIT,
|
||||
threshold = this.DEFAULT_THRESHOLD,
|
||||
includeExactMatches = true,
|
||||
notebookId,
|
||||
defaultTitle = 'Untitled'
|
||||
} = options
|
||||
|
||||
if (!query || query.trim().length < 2) {
|
||||
return []
|
||||
}
|
||||
|
||||
try {
|
||||
const keywordResults = await this.keywordSearch(query, userId, notebookId)
|
||||
const semanticResults = await this.semanticVectorSearch(query, userId, threshold, notebookId)
|
||||
const fusedResults = await this.reciprocalRankFusion(keywordResults, semanticResults)
|
||||
|
||||
return fusedResults
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, limit)
|
||||
.map(result => ({
|
||||
...result,
|
||||
title: result.title || defaultTitle,
|
||||
matchType: result.score > 0.8 ? 'exact' : 'related'
|
||||
}))
|
||||
} catch (error) {
|
||||
console.error('Error in searchAsUser:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch index multiple notes (for initial migration or bulk updates)
|
||||
*/
|
||||
|
||||
167
keep-notes/lib/ai/tools/extract-images.ts
Normal file
167
keep-notes/lib/ai/tools/extract-images.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
/**
|
||||
* Image Extraction Utility
|
||||
* Extracts image URLs from web pages using Cheerio.
|
||||
* Downloads and saves images locally for agent note attachment.
|
||||
*/
|
||||
|
||||
import * as cheerio from 'cheerio'
|
||||
import { promises as fs } from 'fs'
|
||||
import path from 'path'
|
||||
import { randomUUID } from 'crypto'
|
||||
import sharp from 'sharp'
|
||||
|
||||
const UPLOADS_DIR = 'public/uploads/notes'
|
||||
const URL_PREFIX = '/uploads/notes'
|
||||
const MAX_IMAGES_PER_PAGE = 3
|
||||
const MIN_IMAGE_SIZE = 200 // px -- skip icons, spacers, tracking pixels
|
||||
const MAX_IMAGE_WIDTH = 600 // px -- resize for note-friendly display
|
||||
|
||||
export interface ExtractedImage {
|
||||
url: string
|
||||
localPath?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract image URLs from an HTML page.
|
||||
* Prioritizes og:image, then article images with size filtering.
|
||||
*/
|
||||
export function extractImageUrlsFromHtml(html: string, pageUrl: string): string[] {
|
||||
const $ = cheerio.load(html)
|
||||
const images: string[] = []
|
||||
const seen = new Set<string>()
|
||||
|
||||
// 1. Open Graph image
|
||||
const ogImage = $('meta[property="og:image"]').attr('content')
|
||||
if (ogImage) {
|
||||
const resolved = resolveUrl(ogImage, pageUrl)
|
||||
if (resolved && !seen.has(resolved)) {
|
||||
images.push(resolved)
|
||||
seen.add(resolved)
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Twitter card image
|
||||
const twitterImage = $('meta[name="twitter:image"]').attr('content')
|
||||
if (twitterImage) {
|
||||
const resolved = resolveUrl(twitterImage, pageUrl)
|
||||
if (resolved && !seen.has(resolved)) {
|
||||
images.push(resolved)
|
||||
seen.add(resolved)
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Article body images (filter by size and relevance)
|
||||
$('article img, main img, .content img, .post-content img, .entry-content img, .article-body img').each((_, el) => {
|
||||
if (images.length >= MAX_IMAGES_PER_PAGE) return false
|
||||
const src = $(el).attr('src') || $(el).attr('data-src')
|
||||
if (!src) return
|
||||
const width = parseInt($(el).attr('width') || '0', 10)
|
||||
const height = parseInt($(el).attr('height') || '0', 10)
|
||||
// Skip if explicitly sized too small
|
||||
if ((width > 0 && width < MIN_IMAGE_SIZE) || (height > 0 && height < MIN_IMAGE_SIZE)) return
|
||||
// Skip common non-content patterns
|
||||
if (src.includes('avatar') || src.includes('icon') || src.includes('logo') || src.includes('badge') || src.includes('spinner')) return
|
||||
const resolved = resolveUrl(src, pageUrl)
|
||||
if (resolved && !seen.has(resolved)) {
|
||||
images.push(resolved)
|
||||
seen.add(resolved)
|
||||
}
|
||||
})
|
||||
|
||||
// 4. Fallback: any large images in the page if we still have room
|
||||
if (images.length < MAX_IMAGES_PER_PAGE) {
|
||||
$('img').each((_, el) => {
|
||||
if (images.length >= MAX_IMAGES_PER_PAGE) return false
|
||||
const src = $(el).attr('src') || $(el).attr('data-src')
|
||||
if (!src) return
|
||||
const width = parseInt($(el).attr('width') || '0', 10)
|
||||
const height = parseInt($(el).attr('height') || '0', 10)
|
||||
if ((width > 0 && width < MIN_IMAGE_SIZE) || (height > 0 && height < MIN_IMAGE_SIZE)) return
|
||||
if (src.includes('avatar') || src.includes('icon') || src.includes('logo') || src.includes('badge') || src.includes('spinner') || src.includes('pixel') || src.includes('tracking')) return
|
||||
const resolved = resolveUrl(src, pageUrl)
|
||||
if (resolved && !seen.has(resolved)) {
|
||||
images.push(resolved)
|
||||
seen.add(resolved)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return images.slice(0, MAX_IMAGES_PER_PAGE)
|
||||
}
|
||||
|
||||
/**
|
||||
* Download an image and save it locally.
|
||||
*/
|
||||
export async function downloadImage(imageUrl: string): Promise<string | null> {
|
||||
try {
|
||||
const controller = new AbortController()
|
||||
const timeout = setTimeout(() => controller.abort(), 10000)
|
||||
|
||||
const response = await fetch(imageUrl, {
|
||||
signal: controller.signal,
|
||||
headers: { 'User-Agent': 'Mozilla/5.0 (compatible; KeepBot/1.0)' },
|
||||
})
|
||||
clearTimeout(timeout)
|
||||
|
||||
if (!response.ok) return null
|
||||
|
||||
const contentType = response.headers.get('content-type') || ''
|
||||
if (!contentType.startsWith('image/')) return null
|
||||
|
||||
const buffer = Buffer.from(await response.arrayBuffer())
|
||||
if (buffer.length < 1024) return null // Skip tiny files
|
||||
|
||||
const ext = contentType.split('/')[1]?.replace('jpeg', 'jpg') || 'jpg'
|
||||
const filename = `${randomUUID()}.${ext}`
|
||||
|
||||
await fs.mkdir(path.join(process.cwd(), UPLOADS_DIR), { recursive: true })
|
||||
|
||||
// Resize to max width for note-friendly display
|
||||
try {
|
||||
await sharp(buffer)
|
||||
.resize(MAX_IMAGE_WIDTH, null, { withoutEnlargement: true })
|
||||
.jpeg({ quality: 80 })
|
||||
.toFile(path.join(process.cwd(), UPLOADS_DIR, filename.replace(/\.\w+$/, '.jpg')))
|
||||
} catch {
|
||||
// Sharp failed (e.g. SVG, WebP unsupported) — save raw buffer
|
||||
await fs.writeFile(path.join(process.cwd(), UPLOADS_DIR, filename), buffer)
|
||||
}
|
||||
|
||||
// Always reference as .jpg since sharp converts to jpeg
|
||||
return `${URL_PREFIX}/${filename.replace(/\.\w+$/, '.jpg')}`
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract and download images from a web page.
|
||||
* Returns local URLs for successfully downloaded images.
|
||||
*/
|
||||
export async function extractAndDownloadImages(html: string, pageUrl: string): Promise<string[]> {
|
||||
const imageUrls = extractImageUrlsFromHtml(html, pageUrl)
|
||||
const localUrls: string[] = []
|
||||
|
||||
for (const url of imageUrls) {
|
||||
const localPath = await downloadImage(url)
|
||||
if (localPath) {
|
||||
localUrls.push(localPath)
|
||||
}
|
||||
}
|
||||
|
||||
return localUrls
|
||||
}
|
||||
|
||||
function resolveUrl(src: string, pageUrl: string): string | null {
|
||||
try {
|
||||
if (src.startsWith('//')) return `https:${src}`
|
||||
if (src.startsWith('http://') || src.startsWith('https://')) return src
|
||||
if (src.startsWith('/') || src.startsWith('./')) {
|
||||
const base = new URL(pageUrl)
|
||||
return new URL(src, base.origin).href
|
||||
}
|
||||
return new URL(src, pageUrl).href
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
15
keep-notes/lib/ai/tools/index.ts
Normal file
15
keep-notes/lib/ai/tools/index.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
/**
|
||||
* Tools Index
|
||||
* Side-effect imports register all tools into the registry.
|
||||
*/
|
||||
|
||||
// Import all tools (side-effect registration)
|
||||
import './web-search.tool'
|
||||
import './note-search.tool'
|
||||
import './note-crud.tool'
|
||||
import './web-scrape.tool'
|
||||
import './url-fetch.tool'
|
||||
import './memory.tool'
|
||||
|
||||
// Re-export registry
|
||||
export { toolRegistry, type ToolContext, type RegisteredTool } from './registry'
|
||||
62
keep-notes/lib/ai/tools/memory.tool.ts
Normal file
62
keep-notes/lib/ai/tools/memory.tool.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
/**
|
||||
* Memory Search Tool
|
||||
* Searches past AgentActions (logs, toolLogs, inputs) for context.
|
||||
*/
|
||||
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import { toolRegistry } from './registry'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
|
||||
toolRegistry.register({
|
||||
name: 'memory_search',
|
||||
description: 'Search past agent execution history for relevant information. Looks through previous logs, tool traces, and inputs.',
|
||||
isInternal: true,
|
||||
buildTool: (ctx) =>
|
||||
tool({
|
||||
description: 'Search past agent executions for context. Searches through logs and tool traces from previous runs.',
|
||||
inputSchema: z.object({
|
||||
query: z.string().describe('What to search for in past executions'),
|
||||
limit: z.number().optional().describe('Max results (default 5)').default(5),
|
||||
}),
|
||||
execute: async ({ query, limit = 5 }) => {
|
||||
try {
|
||||
// Get past actions for this agent
|
||||
const actions = await prisma.agentAction.findMany({
|
||||
where: {
|
||||
agentId: ctx.agentId,
|
||||
status: 'success',
|
||||
},
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: limit * 2,
|
||||
select: { id: true, log: true, input: true, toolLog: true, createdAt: true },
|
||||
})
|
||||
|
||||
const keywords = query.toLowerCase().split(/\s+/).filter(w => w.length > 2)
|
||||
|
||||
const results = actions
|
||||
.map(a => {
|
||||
const searchable = `${a.log || ''} ${a.input || ''} ${a.toolLog || ''}`.toLowerCase()
|
||||
const score = keywords.reduce((acc, kw) => acc + (searchable.includes(kw) ? 1 : 0), 0)
|
||||
return { ...a, score }
|
||||
})
|
||||
.filter(r => r.score > 0)
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, limit)
|
||||
|
||||
if (results.length === 0) {
|
||||
return { message: 'No matching past executions found.', query }
|
||||
}
|
||||
|
||||
return results.map(r => ({
|
||||
actionId: r.id,
|
||||
date: r.createdAt.toISOString(),
|
||||
log: (r.log || '').substring(0, 800),
|
||||
input: r.input ? (r.input).substring(0, 500) : null,
|
||||
}))
|
||||
} catch (e: any) {
|
||||
return { error: `Memory search failed: ${e.message}` }
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
104
keep-notes/lib/ai/tools/note-crud.tool.ts
Normal file
104
keep-notes/lib/ai/tools/note-crud.tool.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
/**
|
||||
* Note CRUD Tools
|
||||
* note_create, note_read, note_update
|
||||
*/
|
||||
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import { toolRegistry } from './registry'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
|
||||
// --- note_read ---
|
||||
toolRegistry.register({
|
||||
name: 'note_read',
|
||||
description: 'Read a specific note by its ID. Returns the full note content.',
|
||||
isInternal: true,
|
||||
buildTool: (ctx) =>
|
||||
tool({
|
||||
description: 'Read a specific note by ID. Returns the full content.',
|
||||
inputSchema: z.object({
|
||||
noteId: z.string().describe('The ID of the note to read'),
|
||||
}),
|
||||
execute: async ({ noteId }) => {
|
||||
try {
|
||||
const note = await prisma.note.findFirst({
|
||||
where: { id: noteId, userId: ctx.userId },
|
||||
select: { id: true, title: true, content: true, isMarkdown: true, createdAt: true, updatedAt: true },
|
||||
})
|
||||
if (!note) return { error: 'Note not found' }
|
||||
return note
|
||||
} catch (e: any) {
|
||||
return { error: `Read note failed: ${e.message}` }
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
// --- note_create ---
|
||||
toolRegistry.register({
|
||||
name: 'note_create',
|
||||
description: 'Create a new note with a title and content.',
|
||||
isInternal: true,
|
||||
buildTool: (ctx) =>
|
||||
tool({
|
||||
description: 'Create a new note.',
|
||||
inputSchema: z.object({
|
||||
title: z.string().describe('Title for the note'),
|
||||
content: z.string().describe('Content of the note (markdown supported)'),
|
||||
notebookId: z.string().optional().describe('Optional notebook ID to place the note in'),
|
||||
images: z.array(z.string()).optional().describe('Optional array of local image URL paths to attach to the note (e.g. ["/uploads/notes/abc.jpg"])'),
|
||||
}),
|
||||
execute: async ({ title, content, notebookId, images }) => {
|
||||
try {
|
||||
const note = await prisma.note.create({
|
||||
data: {
|
||||
title,
|
||||
content,
|
||||
isMarkdown: true,
|
||||
autoGenerated: true,
|
||||
userId: ctx.userId,
|
||||
notebookId: notebookId || null,
|
||||
images: images && images.length > 0 ? JSON.stringify(images) : null,
|
||||
},
|
||||
select: { id: true, title: true },
|
||||
})
|
||||
return { success: true, noteId: note.id, title: note.title }
|
||||
} catch (e: any) {
|
||||
return { error: `Create note failed: ${e.message}` }
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
// --- note_update ---
|
||||
toolRegistry.register({
|
||||
name: 'note_update',
|
||||
description: 'Update an existing note\'s content.',
|
||||
isInternal: true,
|
||||
buildTool: (ctx) =>
|
||||
tool({
|
||||
description: 'Update an existing note.',
|
||||
inputSchema: z.object({
|
||||
noteId: z.string().describe('The ID of the note to update'),
|
||||
title: z.string().optional().describe('New title (optional)'),
|
||||
content: z.string().optional().describe('New content (optional)'),
|
||||
}),
|
||||
execute: async ({ noteId, title, content }) => {
|
||||
try {
|
||||
const existing = await prisma.note.findFirst({
|
||||
where: { id: noteId, userId: ctx.userId },
|
||||
})
|
||||
if (!existing) return { error: 'Note not found' }
|
||||
|
||||
const data: Record<string, any> = {}
|
||||
if (title !== undefined) data.title = title
|
||||
if (content !== undefined) data.content = content
|
||||
|
||||
await prisma.note.update({ where: { id: noteId }, data })
|
||||
return { success: true, noteId }
|
||||
} catch (e: any) {
|
||||
return { error: `Update note failed: ${e.message}` }
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
54
keep-notes/lib/ai/tools/note-search.tool.ts
Normal file
54
keep-notes/lib/ai/tools/note-search.tool.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
/**
|
||||
* Note Search Tool
|
||||
* Wraps semanticSearchService.searchAsUser()
|
||||
*/
|
||||
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import { toolRegistry } from './registry'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
|
||||
toolRegistry.register({
|
||||
name: 'note_search',
|
||||
description: 'Search the user\'s notes using semantic search. Returns matching notes with titles and content excerpts.',
|
||||
isInternal: true,
|
||||
buildTool: (ctx) =>
|
||||
tool({
|
||||
description: 'Search the user\'s notes by keyword or semantic meaning. Returns matching notes with titles and content excerpts.',
|
||||
inputSchema: z.object({
|
||||
query: z.string().describe('The search query'),
|
||||
limit: z.number().optional().describe('Max results to return (default 5)').default(5),
|
||||
}),
|
||||
execute: async ({ query, limit = 5 }) => {
|
||||
try {
|
||||
// Keyword fallback search using Prisma
|
||||
const keywords = query.toLowerCase().split(/\s+/).filter(w => w.length > 2)
|
||||
const conditions = keywords.flatMap(term => [
|
||||
{ title: { contains: term } },
|
||||
{ content: { contains: term } }
|
||||
])
|
||||
|
||||
const notes = await prisma.note.findMany({
|
||||
where: {
|
||||
userId: ctx.userId,
|
||||
...(conditions.length > 0 ? { OR: conditions } : {}),
|
||||
isArchived: false,
|
||||
trashedAt: null,
|
||||
},
|
||||
select: { id: true, title: true, content: true, createdAt: true },
|
||||
take: limit,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
})
|
||||
|
||||
return notes.map(n => ({
|
||||
id: n.id,
|
||||
title: n.title || 'Untitled',
|
||||
excerpt: n.content.substring(0, 300),
|
||||
createdAt: n.createdAt.toISOString(),
|
||||
}))
|
||||
} catch (e: any) {
|
||||
return { error: `Note search failed: ${e.message}` }
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
56
keep-notes/lib/ai/tools/registry.ts
Normal file
56
keep-notes/lib/ai/tools/registry.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
/**
|
||||
* Tool Registry
|
||||
* Central registry for all agent tools.
|
||||
* Tools self-register on import via side-effect in index.ts.
|
||||
*/
|
||||
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
|
||||
export interface ToolContext {
|
||||
userId: string
|
||||
agentId: string
|
||||
actionId: string
|
||||
config: Record<string, string>
|
||||
}
|
||||
|
||||
export interface RegisteredTool {
|
||||
name: string
|
||||
description: string
|
||||
buildTool: (ctx: ToolContext) => any // Returns an AI SDK tool() synchronously
|
||||
isInternal: boolean // true = no API key needed
|
||||
}
|
||||
|
||||
class ToolRegistry {
|
||||
private tools: Map<string, RegisteredTool> = new Map()
|
||||
|
||||
register(tool: RegisteredTool): void {
|
||||
this.tools.set(tool.name, tool)
|
||||
}
|
||||
|
||||
get(name: string): RegisteredTool | undefined {
|
||||
return this.tools.get(name)
|
||||
}
|
||||
|
||||
buildToolsForAgent(toolNames: string[], ctx: ToolContext): Record<string, any> {
|
||||
const built: Record<string, any> = {}
|
||||
for (const name of toolNames) {
|
||||
const registered = this.tools.get(name)
|
||||
if (registered) {
|
||||
built[name] = registered.buildTool(ctx)
|
||||
}
|
||||
}
|
||||
return built
|
||||
}
|
||||
|
||||
getAvailableTools(): Array<{ name: string; description: string; isInternal: boolean }> {
|
||||
return Array.from(this.tools.values()).map(t => ({
|
||||
name: t.name,
|
||||
description: t.description,
|
||||
isInternal: t.isInternal,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton
|
||||
export const toolRegistry = new ToolRegistry()
|
||||
55
keep-notes/lib/ai/tools/url-fetch.tool.ts
Normal file
55
keep-notes/lib/ai/tools/url-fetch.tool.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
/**
|
||||
* URL Fetch Tool
|
||||
* Fetches a URL and returns parsed content (JSON, CSV, or text).
|
||||
* Max 5MB response.
|
||||
*/
|
||||
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import { toolRegistry } from './registry'
|
||||
|
||||
const MAX_SIZE = 5 * 1024 * 1024 // 5MB
|
||||
|
||||
toolRegistry.register({
|
||||
name: 'url_fetch',
|
||||
description: 'Fetch a URL and return its content. Supports JSON, CSV, and plain text responses. Max 5MB.',
|
||||
isInternal: true,
|
||||
buildTool: (_ctx) =>
|
||||
tool({
|
||||
description: 'Fetch a URL and return its parsed content. Supports JSON, CSV, and text.',
|
||||
inputSchema: z.object({
|
||||
url: z.string().describe('The URL to fetch'),
|
||||
method: z.enum(['GET', 'POST']).optional().describe('HTTP method (default GET)').default('GET'),
|
||||
}),
|
||||
execute: async ({ url, method = 'GET' }) => {
|
||||
try {
|
||||
const response = await fetch(url, { method })
|
||||
if (!response.ok) return { error: `HTTP ${response.status}: ${response.statusText}` }
|
||||
|
||||
const contentLength = parseInt(response.headers.get('content-length') || '0')
|
||||
if (contentLength > MAX_SIZE) return { error: 'Response too large (max 5MB)' }
|
||||
|
||||
const contentType = response.headers.get('content-type') || ''
|
||||
const text = await response.text()
|
||||
|
||||
if (text.length > MAX_SIZE) return { error: 'Response too large (max 5MB)' }
|
||||
|
||||
if (contentType.includes('application/json')) {
|
||||
try {
|
||||
return { type: 'json', data: JSON.parse(text) }
|
||||
} catch {
|
||||
return { type: 'text', content: text.substring(0, 10000) }
|
||||
}
|
||||
}
|
||||
|
||||
if (contentType.includes('text/csv')) {
|
||||
return { type: 'csv', content: text.substring(0, 10000) }
|
||||
}
|
||||
|
||||
return { type: 'text', content: text.substring(0, 10000) }
|
||||
} catch (e: any) {
|
||||
return { error: `Fetch failed: ${e.message}` }
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
88
keep-notes/lib/ai/tools/web-scrape.tool.ts
Normal file
88
keep-notes/lib/ai/tools/web-scrape.tool.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
/**
|
||||
* Web Scrape Tool
|
||||
* Uses Jina Reader API (r.jina.ai) to scrape a URL into markdown.
|
||||
* Falls back to basic fetch on error.
|
||||
* Supports RSS/Atom feeds: parses the feed and scrapes top articles.
|
||||
*/
|
||||
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import { toolRegistry } from './registry'
|
||||
import { rssService } from '../services/rss.service'
|
||||
|
||||
const MAX_ARTICLE_CONTENT = 4000
|
||||
const MAX_TOTAL_CONTENT = 15000
|
||||
const MAX_ARTICLES_FROM_FEED = 5
|
||||
|
||||
async function scrapeSingleUrl(url: string, jinaKey?: string): Promise<{ content: string; url: string }> {
|
||||
const headers: Record<string, string> = { 'Accept': 'text/markdown' }
|
||||
if (jinaKey) {
|
||||
headers['Authorization'] = `Bearer ${jinaKey}`
|
||||
}
|
||||
|
||||
const response = await fetch(`https://r.jina.ai/${url}`, { headers })
|
||||
|
||||
if (!response.ok) {
|
||||
const fallback = await fetch(url)
|
||||
if (!fallback.ok) return { content: `Failed to fetch ${url}: ${fallback.status}`, url }
|
||||
const text = await fallback.text()
|
||||
return { content: text.substring(0, 10000), url }
|
||||
}
|
||||
|
||||
const markdown = await response.text()
|
||||
return { content: markdown.substring(0, MAX_TOTAL_CONTENT), url }
|
||||
}
|
||||
|
||||
// web_scrape: scrape a page into markdown; for RSS/Atom URLs, parse the feed
// and scrape its latest articles into one combined markdown document.
toolRegistry.register({
  name: 'web_scrape',
  description: 'Scrape a web page and return its content as markdown. Supports RSS/Atom feeds — will automatically parse feeds and scrape individual articles.',
  isInternal: false,
  buildTool: (ctx) =>
    tool({
      description: 'Scrape a web page URL and return its content as clean markdown text. If the URL is an RSS/Atom feed, it will parse the feed and scrape the latest articles automatically.',
      inputSchema: z.object({
        url: z.string().describe('The URL to scrape. Can be a regular web page or an RSS/Atom feed URL.'),
      }),
      execute: async ({ url }) => {
        try {
          // Try RSS feed detection first
          if (rssService.isFeedUrl(url)) {
            const feed = await rssService.parseFeed(url)
            if (feed && feed.articles.length > 0) {
              const jinaKey = ctx.config.JINA_API_KEY
              // Only scrape the newest N entries of the feed
              const articlesToScrape = feed.articles.slice(0, MAX_ARTICLES_FROM_FEED)

              // Scrape concurrently; allSettled keeps individual failures from
              // aborting the whole batch (failed entries are skipped below).
              const results = await Promise.allSettled(
                articlesToScrape.map(article => scrapeSingleUrl(article.link, jinaKey))
              )

              const parts: string[] = []
              parts.push(`# ${feed.title}\n_Flux RSS: ${url} — ${feed.articles.length} articles disponibles, ${articlesToScrape.length} scrapés_\n`)

              // Accumulate articles until the total output budget is reached.
              let totalLen = 0
              for (let i = 0; i < results.length; i++) {
                const r = results[i]
                if (r.status === 'fulfilled' && r.value.content) {
                  const article = articlesToScrape[i]
                  const header = `\n---\n\n## ${article.title}\n_Source: ${article.link}_${article.pubDate ? ` — ${new Date(article.pubDate).toISOString().split('T')[0]}` : ''}\n\n`
                  const content = r.value.content.substring(0, MAX_ARTICLE_CONTENT)
                  // Stop (rather than truncate) once this article would overflow the budget
                  if (totalLen + header.length + content.length > MAX_TOTAL_CONTENT) break
                  parts.push(header + content)
                  totalLen += header.length + content.length
                }
              }

              // NOTE(review): articlesScraped counts attempted scrapes, including
              // ones that failed or were dropped by the length budget — confirm intended.
              return { content: parts.join(''), url, feedTitle: feed.title, articlesScraped: articlesToScrape.length }
            }
            // If feed parsing failed, fall through to normal scraping
          }

          // Normal web page scraping
          const result = await scrapeSingleUrl(url, ctx.config.JINA_API_KEY)
          return result
        } catch (e: any) {
          return { error: `Scrape failed: ${e.message}` }
        }
      },
    }),
})
|
||||
65
keep-notes/lib/ai/tools/web-search.tool.ts
Normal file
65
keep-notes/lib/ai/tools/web-search.tool.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
/**
|
||||
* Web Search Tool
|
||||
* Uses SearXNG or Brave Search API.
|
||||
*/
|
||||
|
||||
import { tool } from 'ai'
|
||||
import { z } from 'zod'
|
||||
import { toolRegistry } from './registry'
|
||||
|
||||
async function searchSearXNG(query: string, searxngUrl: string): Promise<any> {
|
||||
const url = `${searxngUrl.replace(/\/+$/, '')}/search?q=${encodeURIComponent(query)}&format=json`
|
||||
const response = await fetch(url, { headers: { 'Accept': 'application/json' } })
|
||||
if (!response.ok) throw new Error(`SearXNG error: ${response.status}`)
|
||||
const data = await response.json()
|
||||
return (data.results || []).slice(0, 8).map((r: any) => ({
|
||||
title: r.title,
|
||||
url: r.url,
|
||||
snippet: r.content || '',
|
||||
}))
|
||||
}
|
||||
|
||||
async function searchBrave(query: string, apiKey: string): Promise<any> {
|
||||
const url = `https://api.search.brave.com/res/v1/web/search?q=${encodeURIComponent(query)}&count=8`
|
||||
const response = await fetch(url, {
|
||||
headers: { 'Accept': 'application/json', 'X-Subscription-Token': apiKey }
|
||||
})
|
||||
if (!response.ok) throw new Error(`Brave error: ${response.status}`)
|
||||
const data = await response.json()
|
||||
return (data.web?.results || []).map((r: any) => ({
|
||||
title: r.title,
|
||||
url: r.url,
|
||||
snippet: r.description || '',
|
||||
}))
|
||||
}
|
||||
|
||||
toolRegistry.register({
|
||||
name: 'web_search',
|
||||
description: 'Search the web for information. Returns a list of results with titles, URLs and snippets.',
|
||||
isInternal: false,
|
||||
buildTool: (ctx) =>
|
||||
tool({
|
||||
description: 'Search the web for information. Returns results with titles, URLs and snippets.',
|
||||
inputSchema: z.object({
|
||||
query: z.string().describe('The search query'),
|
||||
}),
|
||||
execute: async ({ query }) => {
|
||||
try {
|
||||
const provider = ctx.config.WEB_SEARCH_PROVIDER || 'searxng'
|
||||
|
||||
if (provider === 'brave' || provider === 'both') {
|
||||
const apiKey = ctx.config.BRAVE_SEARCH_API_KEY
|
||||
if (apiKey) {
|
||||
return await searchBrave(query, apiKey)
|
||||
}
|
||||
}
|
||||
|
||||
// Default: SearXNG
|
||||
const searxngUrl = ctx.config.SEARXNG_URL || 'http://localhost:8080'
|
||||
return await searchSearXNG(query, searxngUrl)
|
||||
} catch (e: any) {
|
||||
return { error: `Web search failed: ${e.message}` }
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
@@ -8,6 +8,25 @@ export interface TitleSuggestion {
|
||||
confidence: number;
|
||||
}
|
||||
|
||||
/**
 * Options for generateWithTools(): a multi-step, tool-using generation call.
 */
export interface ToolUseOptions {
  tools: Record<string, any> // AI SDK tool() objects
  /** Cap on the number of agent-loop steps. */
  maxSteps?: number
  /** Optional system prompt for the run. */
  systemPrompt?: string
  // NOTE(review): presumably exactly one of `messages` or `prompt` is supplied —
  // confirm against the provider implementations.
  /** Full chat history input. */
  messages?: any[]
  /** Single-shot prompt input (alternative to `messages`). */
  prompt?: string
}
|
||||
|
||||
/**
 * Result of a multi-step tool-use run.
 * `steps` preserves the per-step breakdown; the top-level arrays appear to
 * aggregate across steps — NOTE(review): confirm in provider implementations.
 */
export interface ToolCallResult {
  /** Tool invocations (name + input). */
  toolCalls: Array<{ toolName: string; input: any }>
  /** Tool invocations paired with their outputs. */
  toolResults: Array<{ toolName: string; input: any; output: any }>
  /** Final generated text. */
  text: string
  /** Per-step text and tool activity. */
  steps: Array<{
    text: string
    toolCalls: Array<{ toolName: string; input: any }>
    toolResults: Array<{ toolName: string; input: any; output: any }>
  }>
}
|
||||
|
||||
export interface AIProvider {
|
||||
/**
|
||||
* Analyse le contenu et suggère des tags pertinents.
|
||||
@@ -28,6 +47,21 @@ export interface AIProvider {
|
||||
* Génère du texte basé sur un prompt.
|
||||
*/
|
||||
generateText(prompt: string): Promise<string>;
|
||||
|
||||
/**
|
||||
* Fournit une réponse de chat (utilisé pour le système agentique)
|
||||
*/
|
||||
chat(messages: any[], systemPrompt?: string): Promise<any>;
|
||||
|
||||
/**
|
||||
* Retourne le modèle AI SDK pour le streaming direct (utilisé par l'API route)
|
||||
*/
|
||||
getModel(): any;
|
||||
|
||||
/**
|
||||
* Generate text with tool-use support (multi-step agent loop)
|
||||
*/
|
||||
generateWithTools(options: ToolUseOptions): Promise<ToolCallResult>;
|
||||
}
|
||||
|
||||
export type AIProviderType = 'openai' | 'ollama';
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
* ===================================
|
||||
*
|
||||
* Recommandations pour un système de couleurs unifié et moderne
|
||||
* Inspiré de Google Keep avec une approche contemporaine
|
||||
* Approche contemporaine des couleurs de notes
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
@@ -1,25 +1,17 @@
|
||||
import prisma from './prisma'
|
||||
import { unstable_cache } from 'next/cache'
|
||||
|
||||
// Cached loader for the SystemConfig table. Keyed and tagged 'system-config'
// so writers can invalidate it via revalidateTag('system-config').
const getCachedSystemConfig = unstable_cache(
  async () => {
    try {
      const configs = await prisma.systemConfig.findMany()
      // Flatten rows into a key -> value map
      return configs.reduce((acc, conf) => {
        acc[conf.key] = conf.value
        return acc
      }, {} as Record<string, string>)
    } catch (e) {
      // DB unavailable: log and return an empty config instead of throwing
      console.error('Failed to load system config from DB:', e)
      return {}
    }
  },
  ['system-config'],
  { tags: ['system-config'] }
)
|
||||
|
||||
export async function getSystemConfig() {
|
||||
return getCachedSystemConfig()
|
||||
try {
|
||||
const configs = await prisma.systemConfig.findMany()
|
||||
return configs.reduce((acc, conf) => {
|
||||
acc[conf.key] = conf.value
|
||||
return acc
|
||||
}, {} as Record<string, string>)
|
||||
} catch (e) {
|
||||
console.error('Failed to load system config from DB:', e)
|
||||
return {}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -4,6 +4,9 @@ import { createContext, useContext, useEffect, useState, useCallback, useRef } f
|
||||
import type { ReactNode } from 'react'
|
||||
import { SupportedLanguage, loadTranslations, getTranslationValue, Translations } from './load-translations'
|
||||
|
||||
// Static imports for SSR-safe initial translations (prevents hydration mismatch)
|
||||
import enTranslations from '@/locales/en.json'
|
||||
|
||||
type LanguageContextType = {
|
||||
language: SupportedLanguage
|
||||
setLanguage: (lang: SupportedLanguage) => void
|
||||
@@ -14,26 +17,52 @@ type LanguageContextType = {
|
||||
const LanguageContext = createContext<LanguageContextType | undefined>(undefined)
|
||||
|
||||
const RTL_LANGUAGES: SupportedLanguage[] = ['ar', 'fa']
|
||||
const SUPPORTED_LANGS: SupportedLanguage[] = ['en', 'fr', 'es', 'de', 'fa', 'it', 'pt', 'ru', 'zh', 'ja', 'ko', 'ar', 'hi', 'nl', 'pl']
|
||||
|
||||
function updateDocumentDirection(lang: SupportedLanguage) {
|
||||
document.documentElement.lang = lang
|
||||
document.documentElement.dir = RTL_LANGUAGES.includes(lang) ? 'rtl' : 'ltr'
|
||||
}
|
||||
|
||||
export function LanguageProvider({ children, initialLanguage = 'en' }: {
|
||||
/**
|
||||
* Resolve the actual language to use:
|
||||
* 1. If localStorage has a saved preference, use that (client only)
|
||||
* 2. Otherwise fall back to the server-detected initialLanguage
|
||||
*/
|
||||
function resolveLanguage(fallback: SupportedLanguage): SupportedLanguage {
|
||||
if (typeof window !== 'undefined') {
|
||||
try {
|
||||
const saved = localStorage.getItem('user-language') as SupportedLanguage
|
||||
if (saved && SUPPORTED_LANGS.includes(saved)) return saved
|
||||
} catch {}
|
||||
}
|
||||
return fallback
|
||||
}
|
||||
|
||||
export function LanguageProvider({ children, initialLanguage = 'en', initialTranslations }: {
|
||||
children: ReactNode
|
||||
initialLanguage?: SupportedLanguage
|
||||
initialTranslations?: Translations
|
||||
}) {
|
||||
const [language, setLanguageState] = useState<SupportedLanguage>(initialLanguage)
|
||||
const [translations, setTranslations] = useState<Translations | null>(null)
|
||||
// Resolve language synchronously from localStorage BEFORE any effect runs.
|
||||
// This prevents the flash where initialLanguage ('en') overrides RTL.
|
||||
const [language, setLanguageState] = useState<SupportedLanguage>(() => resolveLanguage(initialLanguage))
|
||||
|
||||
// Start with server-provided translations or English fallback
|
||||
const [translations, setTranslations] = useState<Translations>(
|
||||
(initialTranslations || enTranslations) as unknown as Translations
|
||||
)
|
||||
const cacheRef = useRef<Map<SupportedLanguage, Translations>>(new Map())
|
||||
const isFirstRender = useRef(true)
|
||||
|
||||
// Load translations when language changes (with caching)
|
||||
// On first render, skip updateDocumentDirection since the inline script already set it.
|
||||
useEffect(() => {
|
||||
const cached = cacheRef.current.get(language)
|
||||
if (cached) {
|
||||
setTranslations(cached)
|
||||
updateDocumentDirection(language)
|
||||
if (!isFirstRender.current) updateDocumentDirection(language)
|
||||
isFirstRender.current = false
|
||||
return
|
||||
}
|
||||
|
||||
@@ -41,28 +70,12 @@ export function LanguageProvider({ children, initialLanguage = 'en' }: {
|
||||
const loaded = await loadTranslations(language)
|
||||
cacheRef.current.set(language, loaded)
|
||||
setTranslations(loaded)
|
||||
updateDocumentDirection(language)
|
||||
if (!isFirstRender.current) updateDocumentDirection(language)
|
||||
isFirstRender.current = false
|
||||
}
|
||||
loadLang()
|
||||
}, [language])
|
||||
|
||||
// Load saved language from localStorage on mount
|
||||
useEffect(() => {
|
||||
const saved = localStorage.getItem('user-language') as SupportedLanguage
|
||||
if (saved) {
|
||||
setLanguageState(saved)
|
||||
} else {
|
||||
// Auto-detect from browser language
|
||||
const browserLang = navigator.language.split('-')[0] as SupportedLanguage
|
||||
const supportedLangs: SupportedLanguage[] = ['en', 'fr', 'es', 'de', 'fa', 'it', 'pt', 'ru', 'zh', 'ja', 'ko', 'ar', 'hi', 'nl', 'pl']
|
||||
|
||||
if (supportedLangs.includes(browserLang)) {
|
||||
setLanguageState(browserLang)
|
||||
localStorage.setItem('user-language', browserLang)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
const setLanguage = useCallback((lang: SupportedLanguage) => {
|
||||
setLanguageState(lang)
|
||||
localStorage.setItem('user-language', lang)
|
||||
@@ -84,21 +97,6 @@ export function LanguageProvider({ children, initialLanguage = 'en' }: {
|
||||
return typeof value === 'string' ? value : key
|
||||
}, [translations])
|
||||
|
||||
// During initial load, show children with the initial language as fallback
|
||||
// to prevent blank flash
|
||||
if (!translations) {
|
||||
return (
|
||||
<LanguageContext.Provider value={{
|
||||
language: initialLanguage,
|
||||
setLanguage,
|
||||
t: (key: string) => key,
|
||||
translations: {} as Translations
|
||||
}}>
|
||||
{children}
|
||||
</LanguageContext.Provider>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<LanguageContext.Provider value={{ language, setLanguage, t, translations }}>
|
||||
{children}
|
||||
|
||||
57
keep-notes/lib/image-cleanup.ts
Normal file
57
keep-notes/lib/image-cleanup.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* Image Cleanup Utility
|
||||
* Safely deletes orphaned image files from disk.
|
||||
* Checks database references before deleting to avoid breaking shared images.
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'fs'
|
||||
import path from 'path'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
|
||||
const UPLOADS_DIR = 'public/uploads/notes'
|
||||
|
||||
/**
|
||||
* Delete an image file from disk only if no other note references it.
|
||||
* @param imageUrl - The relative URL path (e.g. "/uploads/notes/abc.jpg")
|
||||
* @param excludeNoteId - Note ID to exclude from reference check (the note being deleted)
|
||||
*/
|
||||
export async function deleteImageFileSafely(imageUrl: string, excludeNoteId?: string): Promise<void> {
|
||||
if (!imageUrl || !imageUrl.startsWith('/uploads/notes/')) return
|
||||
|
||||
try {
|
||||
const notes = await prisma.note.findMany({
|
||||
where: { images: { contains: imageUrl } },
|
||||
select: { id: true },
|
||||
})
|
||||
const otherRefs = notes.filter(n => n.id !== excludeNoteId)
|
||||
if (otherRefs.length > 0) return // File still referenced elsewhere
|
||||
|
||||
const filePath = path.join(process.cwd(), imageUrl)
|
||||
await fs.unlink(filePath)
|
||||
} catch {
|
||||
// File already gone or unreadable -- silently skip
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete all image files associated with a note.
|
||||
* Checks that each image is not referenced by any other note before deleting.
|
||||
*/
|
||||
export async function cleanupNoteImages(noteId: string, imageUrls: string[]): Promise<void> {
|
||||
for (const url of imageUrls) {
|
||||
await deleteImageFileSafely(url, noteId)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the images JSON field from a note record.
|
||||
*/
|
||||
export function parseImageUrls(imagesJson: string | null): string[] {
|
||||
if (!imagesJson) return []
|
||||
try {
|
||||
const parsed = JSON.parse(imagesJson)
|
||||
return Array.isArray(parsed) ? parsed.filter((u: unknown) => typeof u === 'string') : []
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
@@ -1,45 +1,123 @@
|
||||
import nodemailer from 'nodemailer';
|
||||
import { getSystemConfig } from './config';
|
||||
|
||||
export interface InlineAttachment {
|
||||
filename: string
|
||||
content: Buffer
|
||||
cid: string
|
||||
}
|
||||
|
||||
interface MailOptions {
|
||||
to: string;
|
||||
subject: string;
|
||||
html: string;
|
||||
attachments?: InlineAttachment[];
|
||||
}
|
||||
|
||||
export async function sendEmail({ to, subject, html }: MailOptions) {
|
||||
const config = await getSystemConfig();
|
||||
interface MailResult {
|
||||
success: boolean;
|
||||
messageId?: string;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
type EmailProvider = 'auto' | 'resend' | 'smtp';
|
||||
|
||||
/**
|
||||
* Send email.
|
||||
* - 'auto': try Resend first (if key set), fall back to SMTP on failure
|
||||
* - 'smtp': force SMTP only
|
||||
* - 'resend': force Resend only
|
||||
* Supports inline image attachments via cid: references in HTML.
|
||||
*/
|
||||
export async function sendEmail({ to, subject, html, attachments }: MailOptions, provider: EmailProvider = 'auto'): Promise<MailResult> {
|
||||
const config = await getSystemConfig();
|
||||
const resendKey = config.RESEND_API_KEY || process.env.RESEND_API_KEY;
|
||||
|
||||
// Force SMTP
|
||||
if (provider === 'smtp') {
|
||||
return sendViaSMTP(config, { to, subject, html, attachments });
|
||||
}
|
||||
|
||||
// Force Resend (no fallback)
|
||||
if (provider === 'resend') {
|
||||
if (!resendKey) return { success: false, error: 'No Resend API key configured' };
|
||||
return sendViaResend(resendKey, { to, subject, html, attachments });
|
||||
}
|
||||
|
||||
// Auto: try Resend, fall back to SMTP
|
||||
if (resendKey) {
|
||||
const result = await sendViaResend(resendKey, { to, subject, html, attachments });
|
||||
if (result.success) return result;
|
||||
|
||||
console.warn('[Mail] Resend failed, falling back to SMTP:', result.error);
|
||||
return sendViaSMTP(config, { to, subject, html, attachments });
|
||||
}
|
||||
|
||||
return sendViaSMTP(config, { to, subject, html, attachments });
|
||||
}
|
||||
|
||||
async function sendViaResend(apiKey: string, { to, subject, html, attachments }: MailOptions): Promise<MailResult> {
|
||||
try {
|
||||
const { Resend } = await import('resend');
|
||||
const resend = new Resend(apiKey);
|
||||
|
||||
const from = process.env.NEXTAUTH_URL
|
||||
? `Memento <noreply@${new URL(process.env.NEXTAUTH_URL).hostname}>`
|
||||
: 'Memento <onboarding@resend.dev>';
|
||||
|
||||
// Resend supports attachments with inline content
|
||||
const resendAttachments = attachments?.map(att => ({
|
||||
filename: att.filename,
|
||||
content: att.content.toString('base64'),
|
||||
content_type: att.filename.endsWith('.png') ? 'image/png' : 'image/jpeg',
|
||||
disposition: 'inline' as const,
|
||||
content_id: att.cid,
|
||||
}));
|
||||
|
||||
const { data, error } = await resend.emails.send({
|
||||
from,
|
||||
to,
|
||||
subject,
|
||||
html,
|
||||
attachments: resendAttachments,
|
||||
});
|
||||
|
||||
if (error) {
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
|
||||
return { success: true, messageId: data?.id };
|
||||
} catch (error: any) {
|
||||
return { success: false, error: `Resend: ${error.message}` };
|
||||
}
|
||||
}
|
||||
|
||||
async function sendViaSMTP(config: Record<string, string>, { to, subject, html, attachments }: MailOptions): Promise<MailResult> {
|
||||
const host = config.SMTP_HOST || process.env.SMTP_HOST;
|
||||
const port = parseInt(config.SMTP_PORT || process.env.SMTP_PORT || '587');
|
||||
const user = (config.SMTP_USER || process.env.SMTP_USER || '').trim();
|
||||
const pass = (config.SMTP_PASS || process.env.SMTP_PASS || '').trim();
|
||||
const from = config.SMTP_FROM || process.env.SMTP_FROM || 'noreply@memento.app';
|
||||
|
||||
// Options de sécurité
|
||||
const forceSecure = config.SMTP_SECURE === 'true'; // Forcé par l'admin
|
||||
const isPort465 = port === 465;
|
||||
// Si secure n'est pas forcé, on déduit du port (465 = secure, autres = starttls)
|
||||
const secure = forceSecure || isPort465;
|
||||
|
||||
if (!host) {
|
||||
return { success: false, error: 'SMTP host is not configured' };
|
||||
}
|
||||
|
||||
const forceSecure = config.SMTP_SECURE === 'true';
|
||||
const isPort465 = port === 465;
|
||||
const secure = forceSecure || isPort465;
|
||||
const ignoreCerts = config.SMTP_IGNORE_CERT === 'true';
|
||||
|
||||
const transporter = nodemailer.createTransport({
|
||||
host: host || undefined,
|
||||
port: port || undefined,
|
||||
secure: secure || false,
|
||||
host,
|
||||
port,
|
||||
secure,
|
||||
auth: { user, pass },
|
||||
// Force IPv4 pour éviter les problèmes de résolution DNS/Docker
|
||||
family: 4,
|
||||
// Force AUTH LOGIN pour meilleure compatibilité (Mailcow, Exchange) vs PLAIN par défaut
|
||||
authMethod: 'LOGIN',
|
||||
// Timeout généreux
|
||||
connectionTimeout: 10000,
|
||||
tls: {
|
||||
// Si on ignore les certs, on autorise tout.
|
||||
// Sinon on laisse les défauts stricts de Node.
|
||||
rejectUnauthorized: !ignoreCerts,
|
||||
// Compatibilité vieux serveurs si besoin (optionnel, activé si ignoreCerts pour maximiser les chances)
|
||||
ciphers: ignoreCerts ? 'SSLv3' : undefined
|
||||
}
|
||||
} as any);
|
||||
@@ -47,19 +125,23 @@ export async function sendEmail({ to, subject, html }: MailOptions) {
|
||||
try {
|
||||
await transporter.verify();
|
||||
|
||||
// Build nodemailer inline attachments with cid
|
||||
const smtpAttachments = attachments?.map(att => ({
|
||||
filename: att.filename,
|
||||
content: att.content,
|
||||
cid: att.cid,
|
||||
}));
|
||||
|
||||
const info = await transporter.sendMail({
|
||||
from: `"Memento App" <${from}>`,
|
||||
from: `"Memento" <${from}>`,
|
||||
to,
|
||||
subject,
|
||||
html,
|
||||
attachments: smtpAttachments,
|
||||
});
|
||||
|
||||
return { success: true, messageId: info.messageId };
|
||||
} catch (error: any) {
|
||||
console.error("❌ Erreur SMTP:", error);
|
||||
return {
|
||||
success: false,
|
||||
error: `Erreur envoi: ${error.message} (Code: ${error.code})`
|
||||
};
|
||||
console.error('SMTP error:', error);
|
||||
return { success: false, error: `SMTP: ${error.message} (Code: ${error.code})` };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
36
keep-notes/lib/notebook-icon.tsx
Normal file
36
keep-notes/lib/notebook-icon.tsx
Normal file
@@ -0,0 +1,36 @@
|
||||
import {
|
||||
Folder,
|
||||
Briefcase,
|
||||
FileText,
|
||||
Zap,
|
||||
BarChart3,
|
||||
Globe,
|
||||
Sparkles,
|
||||
Book,
|
||||
Heart,
|
||||
Crown,
|
||||
Music,
|
||||
Building2,
|
||||
Plane,
|
||||
type LucideIcon,
|
||||
} from 'lucide-react'
|
||||
|
||||
const ICON_MAP: Record<string, LucideIcon> = {
|
||||
'folder': Folder,
|
||||
'briefcase': Briefcase,
|
||||
'document': FileText,
|
||||
'lightning': Zap,
|
||||
'chart': BarChart3,
|
||||
'globe': Globe,
|
||||
'sparkle': Sparkles,
|
||||
'book': Book,
|
||||
'heart': Heart,
|
||||
'crown': Crown,
|
||||
'music': Music,
|
||||
'building': Building2,
|
||||
'flight_takeoff': Plane,
|
||||
}
|
||||
|
||||
export function getNotebookIcon(iconName: string | null | undefined): LucideIcon {
|
||||
return ICON_MAP[iconName || 'folder'] || Folder
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
// @ts-ignore - Generated client
|
||||
import { PrismaClient } from '../prisma/client-generated'
|
||||
import { PrismaClient } from '@prisma/client'
|
||||
|
||||
const prismaClientSingleton = () => {
|
||||
return new PrismaClient({
|
||||
@@ -17,10 +16,10 @@ declare const globalThis: {
|
||||
|
||||
const prisma = globalThis.prismaGlobal ?? prismaClientSingleton()
|
||||
|
||||
// Enable WAL mode for SQLite to improve concurrent read/write performance
|
||||
if (process.env.DATABASE_URL?.includes('sqlite') || prismaClientSingleton.toString().includes('sqlite')) {
|
||||
// Execute via an un-awaited promise or fire-and-forget, PRAGMA is session-based but setting it globally sets DB state
|
||||
prisma.$executeRawUnsafe('PRAGMA journal_mode = WAL;').catch(console.error)
|
||||
// Log current model keys to verify availability
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
const models = Object.keys(prisma).filter(k => !k.startsWith('_') && !k.startsWith('$'))
|
||||
console.log('[Prisma] Models loaded:', models.join(', '))
|
||||
}
|
||||
|
||||
export { prisma }
|
||||
|
||||
@@ -51,6 +51,7 @@ export interface Note {
|
||||
color: string;
|
||||
isPinned: boolean;
|
||||
isArchived: boolean;
|
||||
trashedAt?: Date | null;
|
||||
type: 'text' | 'checklist';
|
||||
checkItems: CheckItem[] | null;
|
||||
labels: string[] | null; // DEPRECATED: Array of label names stored as JSON string
|
||||
|
||||
Reference in New Issue
Block a user