fix: 8 AI call sites were not using configured provider
CRITICAL FIX: Auto-labels, notebook summaries, and other AI features were not working because 8 call sites across 6 services were calling getAIProvider() WITHOUT passing the config parameter. This caused them to use the default 'ollama' provider instead of the configured OpenAI provider from the database. ROOT CAUSE ANALYSIS: Working features (titles): - title-suggestions/route.ts: getAIProvider(config) ✓ Broken features (auto-labels, summaries): - contextual-auto-tag.service.ts: getAIProvider() ✗ (2x) - notebook-summary.service.ts: getAIProvider() ✗ - auto-label-creation.service.ts: getAIProvider() ✗ - notebook-suggestion.service.ts: getAIProvider() ✗ - batch-organization.service.ts: getAIProvider() ✗ - embedding.service.ts: getAIProvider() ✗ (2x) FIXED: All 8 call sites now properly call: const config = await getSystemConfig() const provider = getAIProvider(config) This ensures ALL AI features use the provider configured in the admin interface (OpenAI) instead of defaulting to Ollama. Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { getAIProvider } from '@/lib/ai/factory'
|
||||
import { getSystemConfig } from '@/lib/config'
|
||||
|
||||
export interface SuggestedLabel {
|
||||
name: string
|
||||
@@ -102,7 +103,8 @@ export class AutoLabelCreationService {
|
||||
const prompt = this.buildPrompt(notes, existingLabelNames)
|
||||
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
const response = await provider.generateText(prompt)
|
||||
|
||||
// Parse AI response
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { getAIProvider } from '@/lib/ai/factory'
|
||||
import { getSystemConfig } from '@/lib/config'
|
||||
|
||||
export interface NoteForOrganization {
|
||||
id: string
|
||||
@@ -100,7 +101,8 @@ export class BatchOrganizationService {
|
||||
const prompt = this.buildPrompt(notes, notebooks)
|
||||
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
const response = await provider.generateText(prompt)
|
||||
|
||||
// Parse AI response
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { getAIProvider } from '@/lib/ai/factory'
|
||||
import { getSystemConfig } from '@/lib/config'
|
||||
|
||||
export interface LabelSuggestion {
|
||||
label: string
|
||||
@@ -73,7 +74,8 @@ export class ContextualAutoTagService {
|
||||
const prompt = this.buildPrompt(noteContent, notebook.name, availableLabels)
|
||||
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
|
||||
// Use generateText with JSON response
|
||||
const response = await provider.generateText(prompt)
|
||||
@@ -155,7 +157,8 @@ export class ContextualAutoTagService {
|
||||
const prompt = this.buildNewLabelsPrompt(noteContent, notebook.name)
|
||||
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
|
||||
// Use generateText with JSON response
|
||||
const response = await provider.generateText(prompt)
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
*/
|
||||
|
||||
import { getAIProvider } from '../factory'
|
||||
import { getSystemConfig } from '@/lib/config'
|
||||
|
||||
export interface EmbeddingResult {
|
||||
embedding: number[]
|
||||
@@ -28,7 +29,8 @@ export class EmbeddingService {
|
||||
}
|
||||
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
|
||||
// Use the existing getEmbeddings method from AIProvider
|
||||
const embedding = await provider.getEmbeddings(text)
|
||||
@@ -65,7 +67,8 @@ export class EmbeddingService {
|
||||
}
|
||||
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
|
||||
// Batch embedding using the existing getEmbeddings method
|
||||
const embeddings = await Promise.all(
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { getAIProvider } from '@/lib/ai/factory'
|
||||
import { getSystemConfig } from '@/lib/config'
|
||||
import type { Notebook } from '@/lib/types'
|
||||
|
||||
export class NotebookSuggestionService {
|
||||
@@ -31,7 +32,8 @@ export class NotebookSuggestionService {
|
||||
|
||||
// 3. Call AI
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
|
||||
const response = await provider.generateText(prompt)
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { getAIProvider } from '@/lib/ai/factory'
|
||||
import { getSystemConfig } from '@/lib/config'
|
||||
|
||||
export interface NotebookSummary {
|
||||
notebookId: string
|
||||
@@ -124,7 +125,8 @@ ${content}...`
|
||||
const prompt = this.buildPrompt(notesSummary, notebook.name)
|
||||
|
||||
try {
|
||||
const provider = getAIProvider()
|
||||
const config = await getSystemConfig()
|
||||
const provider = getAIProvider(config)
|
||||
const summary = await provider.generateText(prompt)
|
||||
return summary.trim()
|
||||
} catch (error) {
|
||||
|
||||
Reference in New Issue
Block a user