
update examples to `llama3.1` (#121)

Jeffrey Morgan 9 months ago
commit 478a158b05

+ 4 - 4
README.md

@@ -14,7 +14,7 @@ npm i ollama
 import ollama from 'ollama'
 
 const response = await ollama.chat({
-  model: 'llama2',
+  model: 'llama3.1',
   messages: [{ role: 'user', content: 'Why is the sky blue?' }],
 })
 console.log(response.message.content)
@@ -34,7 +34,7 @@ Response streaming can be enabled by setting `stream: true`, modifying function
 import ollama from 'ollama'
 
 const message = { role: 'user', content: 'Why is the sky blue?' }
-const response = await ollama.chat({ model: 'llama2', messages: [message], stream: true })
+const response = await ollama.chat({ model: 'llama3.1', messages: [message], stream: true })
 for await (const part of response) {
   process.stdout.write(part.message.content)
 }
@@ -46,7 +46,7 @@ for await (const part of response) {
 import ollama from 'ollama'
 
 const modelfile = `
-FROM llama2
+FROM llama3.1
 SYSTEM "You are mario from super mario bros."
 `
 await ollama.create({ model: 'example', modelfile: modelfile })
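For context, the model created from this Modelfile is then usable by name with the same chat API the README shows above. A minimal sketch (the prompt text here is illustrative, not part of the diff):

```typescript
import ollama from 'ollama'

// Create the customized model from the Modelfile, as in the README example.
const modelfile = `
FROM llama3.1
SYSTEM "You are mario from super mario bros."
`
await ollama.create({ model: 'example', modelfile: modelfile })

// Then chat with the created model by the name given to create().
const response = await ollama.chat({
  model: 'example',
  messages: [{ role: 'user', content: 'Who are you?' }],
})
console.log(response.message.content)
```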
@@ -209,7 +209,7 @@ import { Ollama } from 'ollama'
 
 const ollama = new Ollama({ host: 'http://127.0.0.1:11434' })
 const response = await ollama.chat({
-  model: 'llama2',
+  model: 'llama3.1',
   messages: [{ role: 'user', content: 'Why is the sky blue?' }],
 })
 ```
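The custom client in this last hunk supports the same methods as the default export, so the streaming form works through it too. A minimal sketch, assuming the same local host; the stream: true generate call and prompt are chosen for illustration:

```typescript
import { Ollama } from 'ollama'

// Point the client at a specific Ollama server instead of the default.
const client = new Ollama({ host: 'http://127.0.0.1:11434' })

// Stream a completion through the custom client.
const stream = await client.generate({
  model: 'llama3.1',
  prompt: 'Why is the sky blue?',
  stream: true,
})
for await (const part of stream) {
  process.stdout.write(part.response)
}
```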

+ 1 - 1
examples/abort/any-request.ts

@@ -8,7 +8,7 @@ setTimeout(() => {
 
 try {
   ollama.generate({
-    model: 'llama2',
+    model: 'llama3.1',
     prompt: 'Write a long story',
     stream: true,
   }).then(
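The surrounding any-request.ts example (the setTimeout visible in the hunk header) cancels the stream from a timer via the client's abort() method. A minimal self-contained sketch of that pattern; the one-second delay and the error handling are illustrative, not from the diff:

```typescript
import ollama from 'ollama'

// Abort every in-flight streamed request on this client after one second.
setTimeout(() => {
  ollama.abort()
}, 1000)

try {
  const stream = await ollama.generate({
    model: 'llama3.1',
    prompt: 'Write a long story',
    stream: true,
  })
  for await (const part of stream) {
    process.stdout.write(part.response)
  }
} catch (error) {
  console.error('request aborted:', error)
}
```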

+ 1 - 1
examples/abort/specific-request.ts

@@ -11,7 +11,7 @@ setTimeout(() => {
 
 try {
   ollama.generate({
-    model: 'llama2',
+    model: 'llama3.1',
     prompt: 'Write a long story',
     stream: true,
   }).then(
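The specific-request variant differs from any-request in what gets cancelled: one stream rather than all of them. A sketch of that idea, assuming the streamed call returns an iterator exposing its own .abort() method, as in recent ollama-js releases; the details below are illustrative:

```typescript
import ollama from 'ollama'

// Start one streamed request. Recent ollama-js versions return an
// abortable iterator (an assumption for this sketch).
const stream = await ollama.generate({
  model: 'llama3.1',
  prompt: 'Write a long story',
  stream: true,
})

// Cancel only this request after one second; other requests keep running.
setTimeout(() => stream.abort(), 1000)

try {
  for await (const part of stream) {
    process.stdout.write(part.response)
  }
} catch (error) {
  console.error('this request was aborted:', error)
}
```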

+ 1 - 1
examples/pull-progress/pull.ts

@@ -1,6 +1,6 @@
 import ollama from 'ollama'
 
-const model = 'llama2'
+const model = 'llama3.1'
 console.log(`downloading ${model}...`)
 let currentDigestDone = false
 const stream = await ollama.pull({ model: model, stream: true })
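The rest of pull.ts (beyond this hunk) renders a progress bar from that stream. A minimal sketch of consuming the same pull stream; the status/digest/total/completed fields are the library's standard progress fields, while the percentage formatting is illustrative:

```typescript
import ollama from 'ollama'

const model = 'llama3.1'
console.log(`downloading ${model}...`)

// Each streamed part reports a status string plus byte counts for the
// layer currently being pulled.
const stream = await ollama.pull({ model: model, stream: true })
for await (const part of stream) {
  if (part.digest && part.total) {
    const percent = Math.round(((part.completed ?? 0) / part.total) * 100)
    process.stdout.write(`\r${part.status} ${percent}%`)
  } else {
    console.log(part.status)
  }
}
```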