#!/usr/bin/env node
import { Ollama } from 'ollama';
import { getStdin } from '../lib/ai-generated.ts';

// Local Ollama instance and the model to chat with.
const ollama = new Ollama({ host: 'http://127.0.0.1:11434' });

const model = 'gemma3:12b';
// const model = 'codellama:13b';
// const model = 'aya:8b';

async function streamChat() {
  // The prompt is the first CLI argument; bail out if it is missing.
  const [, , message] = process.argv;
  if (!message) return;

  // Piped stdin is appended to the prompt (awaited in case getStdin resolves asynchronously).
  const file = await getStdin();

  const stream = await ollama.chat({
    model,
    messages: [{ role: 'user', content: `${message}\n${file}` }],
    stream: true,
  });

  // Print tokens to stdout as they arrive.
  for await (const chunk of stream) {
    process.stdout.write(chunk.message.content);
  }
}

await streamChat();
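
// Usage sketch (an illustration, not part of the original script): assumes a
// TypeScript-capable Node runtime and that this file is saved as an executable
// named chat.ts (the file name is hypothetical).
//
//   cat notes.md | ./chat.ts "Summarize this file"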