
examples: remove top-level await in examples (#206)

rising 3 months ago
parent
commit
9320655174

+ 10 - 6
examples/fill-in-middle/fill.ts

@@ -1,8 +1,12 @@
 import ollama from 'ollama'
 
-const response = await ollama.generate({
-  model: 'deepseek-coder-v2',
-  prompt: `def add(`,
-  suffix: `return c`,
-})
-console.log(response.response)
+async function main() {
+  const response = await ollama.generate({
+    model: 'deepseek-coder-v2',
+    prompt: `def add(`,
+    suffix: `return c`,
+  })
+  console.log(response.response)
+}
+
+main().catch(console.error)

+ 13 - 9
examples/multimodal/multimodal.ts

@@ -1,12 +1,16 @@
 import ollama from 'ollama'
 
-const imagePath = './examples/multimodal/cat.jpg'
-const response = await ollama.generate({
-  model: 'llava',
-  prompt: 'describe this image:',
-  images: [imagePath],
-  stream: true,
-})
-for await (const part of response) {
-  process.stdout.write(part.response)
+async function main() {
+  const imagePath = './examples/multimodal/cat.jpg'
+  const response = await ollama.generate({
+    model: 'llava',
+    prompt: 'describe this image:',
+    images: [imagePath],
+    stream: true,
+  })
+  for await (const part of response) {
+    process.stdout.write(part.response)
+  }
 }
+
+main().catch(console.error)

+ 23 - 19
examples/pull-progress/pull.ts

@@ -1,25 +1,29 @@
 import ollama from 'ollama'
 
-const model = 'llama3.1'
-console.log(`downloading ${model}...`)
-let currentDigestDone = false
-const stream = await ollama.pull({ model: model, stream: true })
-for await (const part of stream) {
-  if (part.digest) {
-    let percent = 0
-    if (part.completed && part.total) {
-      percent = Math.round((part.completed / part.total) * 100)
-    }
-    process.stdout.clearLine(0) // Clear the current line
-    process.stdout.cursorTo(0) // Move cursor to the beginning of the line
-    process.stdout.write(`${part.status} ${percent}%...`) // Write the new text
-    if (percent === 100 && !currentDigestDone) {
-      console.log() // Output to a new line
-      currentDigestDone = true
+async function main() {
+  const model = 'llama3.1'
+  console.log(`downloading ${model}...`)
+  let currentDigestDone = false
+  const stream = await ollama.pull({ model: model, stream: true })
+  for await (const part of stream) {
+    if (part.digest) {
+      let percent = 0
+      if (part.completed && part.total) {
+        percent = Math.round((part.completed / part.total) * 100)
+      }
+      process.stdout.clearLine(0) // Clear the current line
+      process.stdout.cursorTo(0) // Move cursor to the beginning of the line
+      process.stdout.write(`${part.status} ${percent}%...`) // Write the new text
+      if (percent === 100 && !currentDigestDone) {
+        console.log() // Output to a new line
+        currentDigestDone = true
+      } else {
+        currentDigestDone = false
+      }
     } else {
-      currentDigestDone = false
+      console.log(part.status)
     }
-  } else {
-    console.log(part.status)
   }
 }
+
+main().catch(console.error)
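
All three diffs apply the same refactor: each example's top-level await calls move into an async function main() that is invoked with main().catch(console.error), so any rejection surfaces through the catch handler instead of being left unhandled. A minimal sketch of the resulting shape, using a placeholder model and prompt rather than values taken from these files:

import ollama from 'ollama'

// Wrap the awaited calls in an async entry point instead of relying on
// top-level await; the model and prompt below are illustrative only.
async function main() {
  const response = await ollama.generate({
    model: 'llama3.1',
    prompt: 'why is the sky blue?',
  })
  console.log(response.response)
}

// Surface any error from main() rather than leaving the promise unhandled.
main().catch(console.error)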