import { createOpenAI } from '@ai-sdk/openai';
import { generateText } from 'ai';
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

/**
 * Smoke-test script: loads runtime configuration from the `systemConfig`
 * table, builds an OpenAI-compatible client (OpenRouter by default), sends a
 * small multi-turn chat request, and logs either the generated text or the
 * error. Intended to be run directly (see the trailing `run().catch(...)`).
 */
async function run(): Promise<void> {
  // Flatten key/value rows into a plain lookup object.
  // Object.fromEntries is a single pass — avoids the O(n^2) spread-in-reduce
  // and the `as any` the original used.
  const rows = await prisma.systemConfig.findMany();
  const config: Record<string, string> = Object.fromEntries(
    rows.map((row: { key: string; value: string }) => [row.key, row.value]),
  );

  const customClient = createOpenAI({
    // `??` instead of `||`: only fall back when the key is truly absent
    // (null/undefined), not on any falsy value.
    baseURL: config.CUSTOM_OPENAI_BASE_URL ?? 'https://openrouter.ai/api/v1/',
    apiKey: config.CUSTOM_OPENAI_API_KEY,
    compatibility: 'compatible',
    // Wrap fetch to add the attribution headers OpenRouter recommends and to
    // surface raw HTTP error bodies for debugging.
    fetch: async (url, options) => {
      const headers = new Headers(options?.headers);
      headers.set('HTTP-Referer', 'http://localhost:3000');
      headers.set('X-Title', 'Test');
      const res = await fetch(url, { ...options, headers });
      if (!res.ok) {
        // BUG FIX: read the body from a clone. `res.text()` consumes the
        // body stream, so the original returned an already-drained response
        // and the SDK's own body read would fail ("body already used").
        const text = await res.clone().text();
        console.error("RAW HTTP ERROR FROM OPENROUTER:", text);
      }
      return res;
    }
  });

  const model = customClient(config.AI_MODEL_TAGS);

  // Minimal multi-turn conversation used as a smoke test. `as const` keeps the
  // role strings as literal types so no `any` annotation is needed.
  const messages = [
    { role: 'user', content: 'System Rules\n---\nhello' },
    { role: 'assistant', content: 'Hello!' },
    { role: 'user', content: 'dis moi...' }
  ] as const;

  try {
    const { text } = await generateText({
      model: model,
      messages: [...messages],
    });
    console.log("SUCCESS:", text);
  } catch (err: unknown) {
    // Narrow before touching `.message` — a non-Error throw (string, object)
    // would otherwise crash the error logger itself.
    console.error("SDK ERROR:", err instanceof Error ? err.message : err);
  }
}

run().catch(console.error);