## Translation Files

- Add 11 new language files (es, de, pt, ru, zh, ja, ko, ar, hi, nl, pl)
- Add 100+ missing translation keys across all 15 languages
- New sections: notebook, pagination, ai.batchOrganization, ai.autoLabels
- Update nav section with workspace, quickAccess, myLibrary keys

## Component Updates

- Update 15+ components to use translation keys instead of hardcoded text
- Components: notebook dialogs, sidebar, header, note-input, ghost-tags, etc.
- Replace 80+ hardcoded English/French strings with t() calls
- Ensure consistent UI across all supported languages

## Code Quality

- Remove 77+ console.log statements from the codebase
- Clean up API routes, components, hooks, and services
- Keep only essential error handling (no debugging logs)

## UI/UX Improvements

- Update Keep logo to yellow post-it style (from-yellow-400 to-amber-500)
- Change selection colors to #FEF3C6 (notebooks) and #EFB162 (nav items)
- Make the "+" button permanently visible in the notebooks section
- Fix grammar and syntax errors in multiple components

## Bug Fixes

- Fix JSON syntax errors in it.json, nl.json, pl.json, zh.json
- Fix syntax errors in notebook-suggestion-toast.tsx
- Fix syntax errors in use-auto-tagging.ts
- Fix syntax errors in paragraph-refactor.service.ts
- Fix duplicate "fusion" section in nl.json

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>

Or, a shorter version if you prefer:

feat(i18n): Add 15 languages, remove logs, update UI components

- Create 11 new translation files (es, de, pt, ru, zh, ja, ko, ar, hi, nl, pl)
- Add 100+ translation keys: notebook, pagination, AI features
- Update 15+ components to use translations (80+ strings)
- Remove 77+ console.log statements from the codebase
- Fix JSON syntax errors in 4 translation files
- Fix component syntax errors (toast, hooks, services)
- Update logo to yellow post-it style
- Change selection colors (#FEF3C6, #EFB162)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
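The component updates listed above replace hardcoded labels with translation-key lookups. A minimal sketch of that pattern, assuming a react-i18next-style `t()` hook (the project's actual i18n helper may differ); `nav.myLibrary` is one of the keys named above, while the component name and the `notebook.create` key are illustrative:

```tsx
import { useTranslation } from 'react-i18next'; // assumption: the project may use a different i18n layer

// Illustrative component: the previously hardcoded "My Library" heading becomes
// a keyed lookup, so the label follows the active locale (en, fr, es, de, ...).
export function SidebarLibraryHeader() {
  const { t } = useTranslation();

  return (
    <header>
      <h2>{t('nav.myLibrary')}</h2>
      {/* The "+" button stays permanently visible; 'notebook.create' is a hypothetical key */}
      <button aria-label={t('notebook.create')}>+</button>
    </header>
  );
}
```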
```typescript
import { OpenAIProvider } from './providers/openai';
import { OllamaProvider } from './providers/ollama';
import { CustomOpenAIProvider } from './providers/custom-openai';
import { AIProvider } from './types';

type ProviderType = 'ollama' | 'openai' | 'custom';

function createOllamaProvider(config: Record<string, string>, modelName: string, embeddingModelName: string): OllamaProvider {
  let baseUrl = config?.OLLAMA_BASE_URL || process.env.OLLAMA_BASE_URL || 'http://localhost:11434';

  // Ensure baseUrl doesn't end with /api; OllamaProvider appends it itself
  if (baseUrl.endsWith('/api')) {
    baseUrl = baseUrl.slice(0, -4); // Remove /api
  }

  return new OllamaProvider(baseUrl, modelName, embeddingModelName);
}

function createOpenAIProvider(config: Record<string, string>, modelName: string, embeddingModelName: string): OpenAIProvider {
  const apiKey = config?.OPENAI_API_KEY || process.env.OPENAI_API_KEY || '';

  if (!apiKey) {
    // No-op: an empty key is passed through to OpenAIProvider as-is
  }

  return new OpenAIProvider(apiKey, modelName, embeddingModelName);
}

function createCustomOpenAIProvider(config: Record<string, string>, modelName: string, embeddingModelName: string): CustomOpenAIProvider {
  const apiKey = config?.CUSTOM_OPENAI_API_KEY || process.env.CUSTOM_OPENAI_API_KEY || '';
  const baseUrl = config?.CUSTOM_OPENAI_BASE_URL || process.env.CUSTOM_OPENAI_BASE_URL || '';

  if (!apiKey) {
    // No-op: an empty key is passed through to CustomOpenAIProvider as-is
  }

  if (!baseUrl) {
    // No-op: an empty base URL is passed through to CustomOpenAIProvider as-is
  }

  return new CustomOpenAIProvider(apiKey, baseUrl, modelName, embeddingModelName);
}

function getProviderInstance(providerType: ProviderType, config: Record<string, string>, modelName: string, embeddingModelName: string): AIProvider {
  switch (providerType) {
    case 'ollama':
      return createOllamaProvider(config, modelName, embeddingModelName);
    case 'openai':
      return createOpenAIProvider(config, modelName, embeddingModelName);
    case 'custom':
      return createCustomOpenAIProvider(config, modelName, embeddingModelName);
    // Unknown provider types fall back to Ollama
    default:
      return createOllamaProvider(config, modelName, embeddingModelName);
  }
}

// Provider used for tag generation; configuration falls back from the supplied
// config to environment variables, then to the Ollama defaults.
export function getTagsProvider(config?: Record<string, string>): AIProvider {
  const providerType = (config?.AI_PROVIDER_TAGS || process.env.AI_PROVIDER_TAGS || 'ollama').toLowerCase() as ProviderType;
  const modelName = config?.AI_MODEL_TAGS || process.env.AI_MODEL_TAGS || 'granite4:latest';
  const embeddingModelName = config?.AI_MODEL_EMBEDDING || process.env.AI_MODEL_EMBEDDING || 'embeddinggemma:latest';

  return getProviderInstance(providerType, config || {}, modelName, embeddingModelName);
}

// Provider used for embeddings; same fallback order as getTagsProvider.
export function getEmbeddingsProvider(config?: Record<string, string>): AIProvider {
  const providerType = (config?.AI_PROVIDER_EMBEDDING || process.env.AI_PROVIDER_EMBEDDING || 'ollama').toLowerCase() as ProviderType;
  const modelName = config?.AI_MODEL_TAGS || process.env.AI_MODEL_TAGS || 'granite4:latest';
  const embeddingModelName = config?.AI_MODEL_EMBEDDING || process.env.AI_MODEL_EMBEDDING || 'embeddinggemma:latest';

  return getProviderInstance(providerType, config || {}, modelName, embeddingModelName);
}

// Legacy function for backward compatibility
export function getAIProvider(config?: Record<string, string>): AIProvider {
  return getTagsProvider(config);
}
```
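For reference, a minimal sketch of how these factories might be consumed, e.g. from an API route or service. The import path, the settings object, and the `gpt-4o-mini` model name are illustrative; only `getTagsProvider`, `getEmbeddingsProvider`, and the configuration keys come from the file above:

```typescript
import { getTagsProvider, getEmbeddingsProvider } from '@/lib/ai/factory'; // hypothetical path

// Hypothetical per-user settings (e.g. loaded from a database); the keys mirror
// the environment variables read by the factory functions above.
const settings: Record<string, string> = {
  AI_PROVIDER_TAGS: 'openai',
  AI_MODEL_TAGS: 'gpt-4o-mini', // illustrative model name
  OPENAI_API_KEY: process.env.OPENAI_API_KEY || '',
  AI_PROVIDER_EMBEDDING: 'ollama',
  AI_MODEL_EMBEDDING: 'embeddinggemma:latest',
};

// Tagging and embeddings resolve their provider/model pair independently,
// so tags can run on OpenAI while embeddings stay on a local Ollama instance.
const tagsProvider = getTagsProvider(settings);
const embeddingsProvider = getEmbeddingsProvider(settings);

// With no argument, both fall back to environment variables and then to the
// Ollama defaults ('granite4:latest' / 'embeddinggemma:latest').
const defaultProvider = getTagsProvider();
```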