fix: align Ollama provider results with the telemetry usage structure

Ralph Khreish
2025-05-19 20:49:16 +02:00
parent 60b8e97a1c
commit db6fdebdcd


@@ -3,7 +3,7 @@
  * AI provider implementation for Ollama models using the ollama-ai-provider package.
  */
-import { createOllama, ollama } from 'ollama-ai-provider';
+import { createOllama } from 'ollama-ai-provider';
 import { log } from '../../scripts/modules/utils.js'; // Import logging utility
 import { generateObject, generateText, streamText } from 'ai';
@@ -48,7 +48,13 @@ async function generateOllamaText({
 			temperature
 		});
 		log('debug', `Ollama generated text: ${result.text}`);
-		return result.text;
+		return {
+			text: result.text,
+			usage: {
+				inputTokens: result.usage.promptTokens,
+				outputTokens: result.usage.completionTokens
+			}
+		};
 	} catch (error) {
 		log(
 			'error',
@@ -138,7 +144,13 @@ async function generateOllamaObject({
 			temperature: temperature,
 			maxRetries: maxRetries
 		});
-		return result.object;
+		return {
+			object: result.object,
+			usage: {
+				inputTokens: result.usage.promptTokens,
+				outputTokens: result.usage.completionTokens
+			}
+		};
 	} catch (error) {
 		log(
 			'error',
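
For context, a minimal sketch of how a caller can consume the new { text, usage } / { object, usage } return shape. The import path and the runWithTelemetry wrapper are illustrative assumptions, not part of this commit; only the result shape and the promptTokens/completionTokens mapping come from the diff above.

// Illustrative sketch: generateOllamaText now resolves to an object
// instead of a bare string. The import path and this wrapper are
// hypothetical; the { text, usage } shape is what the diff introduces.
import { generateOllamaText } from './src/ai-providers/ollama.js';

async function runWithTelemetry(params) {
	const { text, usage } = await generateOllamaText(params);

	// inputTokens / outputTokens are mapped inside the provider from the
	// AI SDK result's usage.promptTokens / usage.completionTokens.
	console.log(
		`Ollama call: ${usage.inputTokens} input tokens, ${usage.outputTokens} output tokens`
	);

	return text;
}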