thinking-streaming.ts

import ollama from 'ollama'

async function main() {
  // Stream a chat response with thinking enabled so the model's
  // reasoning tokens arrive separately from the final answer.
  const response = await ollama.chat({
    model: 'deepseek-r1',
    messages: [
      {
        role: 'user',
        content: 'What is 10 + 23',
      },
    ],
    stream: true,
    think: true,
  })

  let startedThinking = false
  let finishedThinking = false

  for await (const chunk of response) {
    // Print a header the first time thinking tokens appear, and another
    // once the model switches from thinking to its final answer.
    if (chunk.message.thinking && !startedThinking) {
      startedThinking = true
      process.stdout.write('Thinking:\n========\n\n')
    } else if (chunk.message.content && startedThinking && !finishedThinking) {
      finishedThinking = true
      process.stdout.write('\n\nResponse:\n========\n\n')
    }

    // Write whichever part this chunk carries: thinking tokens or content.
    if (chunk.message.thinking) {
      process.stdout.write(chunk.message.thinking)
    } else if (chunk.message.content) {
      process.stdout.write(chunk.message.content)
    }
  }
}

main()