'use client';

import { faker } from '@faker-js/faker';
import { streamText } from 'ai';
import { useChat as useBaseChat } from 'ai/react';
import { createOllama } from 'ollama-ai-provider';

import { useSettings } from '@/components/editor/settings';
export const useChat = () => {
  const { keys, model } = useSettings();

  return useBaseChat({
    id: 'editor',
    api: '/api/ai/command',
    body: {
      model: model.value,
    },
    fetch: async (input, init) => {
      try {
        // First try the normal API endpoint
        const res = await fetch(input, init);

        if (res.ok) return res;

        // If the API endpoint fails, fall back to a direct Ollama call
        const { messages } = JSON.parse((init?.body as string) || '{}');

        const ollama = createOllama({
          baseURL: 'http://localhost:11434/api',
        });

        const result = await streamText({
          model: ollama(model.value || 'phi3'),
          messages,
          maxTokens: 2048,
          temperature: 0.7,
        });

        // Stream the Ollama result back in the AI SDK data stream format
        return result.toDataStreamResponse({
          headers: {
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
            'Content-Type': 'text/event-stream',
          },
        });
      } catch (error) {
        console.error('Chat error:', error);
        throw error;
      }
    },
  });
};

// Used for testing. Remove it after implementing the useChat API.
const fakeStreamText = ({
  chunkCount = 10,
  streamProtocol = 'data',
}: {
  chunkCount?: number;
  streamProtocol?: 'data' | 'text';
} = {}) => {
  // Random lorem chunks, each followed by a space so streamed words stay separated
  const chunks = Array.from({ length: chunkCount }, () => ({
    delay: faker.number.int({ max: 150, min: 50 }),
    texts: faker.lorem.words({ max: 3, min: 1 }) + ' ',
  }));
  const encoder = new TextEncoder();

  return new ReadableStream({
    async start(controller) {
      for (const chunk of chunks) {
        await new Promise((resolve) => setTimeout(resolve, chunk.delay));

        if (streamProtocol === 'text') {
          controller.enqueue(encoder.encode(chunk.texts));
        } else {
          // AI SDK data stream protocol: `0:` parts carry text deltas
          controller.enqueue(
            encoder.encode(`0:${JSON.stringify(chunk.texts)}\n`)
          );
        }
      }
      if (streamProtocol === 'data') {
        // Finish-message part (`d:`) with a stop reason and fake usage stats
        controller.enqueue(
          encoder.encode(
            `d:{"finishReason":"stop","usage":{"promptTokens":0,"completionTokens":${chunks.length}}}\n`
          )
        );
      }

      controller.close();
    },
  });
};
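
// Illustrative sketch (not part of the original file): one way fakeStreamText can stand
// in for the real endpoint while /api/ai/command is unimplemented is to return its
// stream as a Response from the useChat fetch override. The name `exampleTestFetch`
// and the header values are assumptions, not something the template prescribes.
const exampleTestFetch: typeof fetch = async () => {
  // Simulate a little network latency before streaming the fake chunks.
  await new Promise((resolve) => setTimeout(resolve, 400));

  return new Response(fakeStreamText(), {
    headers: {
      'Connection': 'keep-alive',
      'Content-Type': 'text/plain',
    },
  });
};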