feat: switch from Mistral to OpenAI gpt-4o-mini model

- Replace @ai-sdk/mistral with @ai-sdk/openai dependency
- Update model from mistral-medium-latest to gpt-4o-mini
- Use OPENAI_API_KEY from environment for authentication
- Improve conversation generation quality with OpenAI model

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: Rosario Moscato
Date: 2025-09-22 22:38:18 +02:00
Parent: 7be782f0d0
Commit: 30bb11d62a
3 changed files with 21 additions and 3 deletions
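As the commit message bullets note, the change boils down to swapping the imported provider and model id, with authentication moving from Mistral's key to OPENAI_API_KEY; the full diffs follow. A minimal sketch of the swap in isolation, assuming the default provider instances from @ai-sdk/mistral and @ai-sdk/openai (which read MISTRAL_API_KEY and OPENAI_API_KEY from the environment, respectively):

// Before: Mistral provider, key taken from MISTRAL_API_KEY
import { mistral } from '@ai-sdk/mistral';
const oldModel = mistral('mistral-medium-latest');

// After: OpenAI provider, key taken from OPENAI_API_KEY
import { openai } from '@ai-sdk/openai';
const newModel = openai('gpt-4o-mini');

// Hypothetical explicit configuration (not part of this commit): if the key
// should not come from OPENAI_API_KEY, createOpenAI accepts it directly.
// import { createOpenAI } from '@ai-sdk/openai';
// const customOpenai = createOpenAI({ apiKey: process.env.MY_OPENAI_KEY });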

package-lock.json (generated)

@@ -9,6 +9,7 @@
       "version": "0.1.0",
       "dependencies": {
         "@ai-sdk/mistral": "^2.0.15",
+        "@ai-sdk/openai": "^2.0.32",
         "@mendable/firecrawl-js": "^4.3.5",
         "@radix-ui/react-progress": "^1.1.7",
         "@radix-ui/react-slot": "^1.2.3",
@@ -67,6 +68,22 @@
         "zod": "^3.25.76 || ^4"
       }
     },
+    "node_modules/@ai-sdk/openai": {
+      "version": "2.0.32",
+      "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-2.0.32.tgz",
+      "integrity": "sha512-p7giSkCs66Q1qYO/NPYI41CrSg65mcm8R2uAdF86+Y1D1/q4mUrWMyf5UTOJ0bx/z4jIPiNgGDCg2Kabi5zrKQ==",
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@ai-sdk/provider": "2.0.0",
+        "@ai-sdk/provider-utils": "3.0.9"
+      },
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "zod": "^3.25.76 || ^4"
+      }
+    },
     "node_modules/@ai-sdk/provider": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz",

package.json

@@ -10,6 +10,7 @@
   },
   "dependencies": {
     "@ai-sdk/mistral": "^2.0.15",
+    "@ai-sdk/openai": "^2.0.32",
     "@mendable/firecrawl-js": "^4.3.5",
     "@radix-ui/react-progress": "^1.1.7",
     "@radix-ui/react-slot": "^1.2.3",

Conversation generation API route

@@ -1,6 +1,6 @@
 import { NextRequest, NextResponse } from 'next/server';
 import { streamObject } from 'ai';
-import { mistral } from '@ai-sdk/mistral';
+import { openai } from '@ai-sdk/openai';
 import { z } from 'zod';
 
 const messageSchema = z.object({
@@ -23,9 +23,9 @@ export async function POST(request: NextRequest) {
     console.log('Generating streaming conversation for:', { title, url, contentLength: content.length, contentPreview: content.substring(0, 200) + '...' });
 
-    // Stream podcast conversation using Mistral
+    // Stream podcast conversation using OpenAI
     const result = streamObject({
-      model: mistral('mistral-medium-latest'),
+      model: openai('gpt-4o-mini'),
       output: 'array',
       schema: messageSchema,
       schemaName: 'PodcastMessage',
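Pieced together from the hunks above, the handler follows the AI SDK's streamObject array-streaming pattern. A sketch of roughly how the route reads after this commit; the model call, output mode, schema name, and console.log match the diff, while the schema fields, request parsing, prompt text, and response helper are assumptions, since only fragments of the file appear here:

import { NextRequest, NextResponse } from 'next/server';
import { streamObject } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

// Element schema for one podcast turn (field names assumed; only the
// schema's existence and usage are visible in the diff).
const messageSchema = z.object({
  speaker: z.string(),
  text: z.string(),
});

export async function POST(request: NextRequest) {
  // Request shape assumed from the logged fields (title, url, content).
  const { title, url, content } = await request.json();

  if (!content) {
    return NextResponse.json({ error: 'Missing content' }, { status: 400 });
  }

  console.log('Generating streaming conversation for:', { title, url, contentLength: content.length, contentPreview: content.substring(0, 200) + '...' });

  // Stream podcast conversation using OpenAI
  const result = streamObject({
    model: openai('gpt-4o-mini'),
    output: 'array',
    schema: messageSchema,
    schemaName: 'PodcastMessage',
    // Prompt wording is illustrative, not taken from the commit.
    prompt: `Turn this article into a two-host podcast conversation.\nTitle: ${title}\nURL: ${url}\n\n${content}`,
  });

  // Stream the generated array elements back to the client as text.
  return result.toTextStreamResponse();
}

Because streamObject only needs a model that supports structured output, gpt-4o-mini is a drop-in replacement here: everything else in the route (schema, output mode, streaming response) is provider-agnostic.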