瀏覽代碼

add insert support to generate endpoint (#113)

Bruce MacDonald 1 年之前
父節點
當前提交
fcd418a746
共有 3 個文件被更改,包括 5 次插入、15 次刪除
  1. 1 0
      README.md
  2. 3 15
      examples/fill-in-middle/fill.ts
  3. 1 0
      src/interfaces.ts

+ 1 - 0
README.md

@@ -85,6 +85,7 @@ ollama.generate(request)
 - `request` `<Object>`: The request object containing generate parameters.
   - `model` `<string>` The name of the model to use for the chat.
   - `prompt` `<string>`: The prompt to send to the model.
+  - `suffix` `<string>`: (Optional) Suffix is the text that comes after the inserted text.
   - `system` `<string>`: (Optional) Override the model system prompt.
   - `template` `<string>`: (Optional) Override the model template.
   - `raw` `<boolean>`: (Optional) Bypass the prompt template and pass the prompt directly to the model.

+ 3 - 15
examples/fill-in-middle/fill.ts

@@ -1,20 +1,8 @@
 import ollama from 'ollama'
 
-const prefix = `def remove_non_ascii(s: str) -> str:
-"""
-`
-const suffix = `
-return result
-`
 const response = await ollama.generate({
-  model: 'codellama:7b-code',
-  prompt: `<PRE> ${prefix} <SUF>${suffix} <MID>`,
-  options: {
-    num_predict: 128,
-    temperature: 0,
-    top_p: 0.9,
-    presence_penalty: 0,
-    stop: ['<EOT>'],
-  },
+  model: 'deepseek-coder-v2',
+  prompt: `def add(`,
+  suffix: `return c`,
 })
 console.log(response.response)

+ 1 - 0
src/interfaces.ts

@@ -46,6 +46,7 @@ export interface Options {
 export interface GenerateRequest {
   model: string
   prompt: string
+  suffix?: string
   system?: string
   template?: string
   context?: number[]