Browse Source

update create api for v0.5.5 and deprecate create from files (#192)

Add new create API structure, remove broken file streaming logic

- Updates CreateRequest interface with new fields
- Removes broken logic for streaming model creation from local files
- Previous implementation did not properly handle file streams
- Model creation from files should be handled by separate PR
- Updates documentation and types to reflect new API structure
Bruce MacDonald 4 months ago
parent
commit
f655d633a4
4 changed files with 26 additions and 123 deletions
  1. 10 2
      README.md
  2. 2 5
      src/browser.ts
  3. 6 113
      src/index.ts
  4. 8 3
      src/interfaces.ts

+ 10 - 2
README.md

@@ -129,11 +129,19 @@ ollama.create(request)
 
 - `request` `<Object>`: The request object containing create parameters.
   - `model` `<string>` The name of the model to create.
-  - `path` `<string>`: (Optional) The path to the Modelfile of the model to create.
-  - `modelfile` `<string>`: (Optional) The content of the Modelfile to create.
+  - `from` `<string>`: The base model to derive from.
   - `stream` `<boolean>`: (Optional) When true an `AsyncGenerator` is returned.
+  - `quantize` `<string>`: (Optional) Quantization precision level (`q8_0`, `q4_K_M`, etc.).
+  - `template` `<string>`: (Optional) The prompt template to use with the model.
+  - `license` `<string|string[]>`: (Optional) The license(s) associated with the model.
+  - `system` `<string>`: (Optional) The system prompt for the model.
+  - `parameters` `<Record<string, unknown>>`: (Optional) Additional model parameters as key-value pairs.
+  - `messages` `<Message[]>`: (Optional) Initial chat messages for the model.
+  - `adapters` `<Record<string, string>>`: (Optional) A key-value map of LoRA adapter configurations.
 - Returns: `<ProgressResponse>`
 
+Note: The `files` parameter is not currently supported in `ollama-js`.
+
 ### delete
 
 ```javascript

+ 2 - 5
src/browser.ts

@@ -176,13 +176,10 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
    * @returns {Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>>} - The response object or a stream of progress responses.
    */
   async create(
-    request: CreateRequest,
+    request: CreateRequest
   ): Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>> {
     return this.processStreamableRequest<ProgressResponse>('create', {
-      name: request.model,
-      stream: request.stream,
-      modelfile: request.modelfile,
-      quantize: request.quantize,
+      ...request
     })
   }
 

+ 6 - 113
src/index.ts

@@ -1,10 +1,7 @@
-import * as utils from './utils.js'
 import { AbortableAsyncIterator } from './utils.js'
 
-import fs, { createReadStream, promises } from 'fs'
-import { dirname, join, resolve } from 'path'
-import { createHash } from 'crypto'
-import { homedir } from 'os'
+import fs, { promises } from 'fs'
+import { resolve } from 'path'
 import { Ollama as OllamaBrowser } from './browser.js'
 
 import type { CreateRequest, ProgressResponse } from './interfaces.js'
@@ -28,47 +25,6 @@ export class Ollama extends OllamaBrowser {
     return image
   }
 
-  /**
-   * Parse the modelfile and replace the FROM and ADAPTER commands with the corresponding blob hashes.
-   * @param modelfile {string} - The modelfile content
-   * @param mfDir {string} - The directory of the modelfile
-   * @private @internal
-   */
-  private async parseModelfile(
-    modelfile: string,
-    mfDir: string = process.cwd(),
-  ): Promise<string> {
-    const out: string[] = []
-    const lines = modelfile.split('\n')
-    for (const line of lines) {
-      const [command, args] = line.split(' ', 2)
-      if (['FROM', 'ADAPTER'].includes(command.toUpperCase())) {
-        const path = this.resolvePath(args.trim(), mfDir)
-        if (await this.fileExists(path)) {
-          out.push(`${command} @${await this.createBlob(path)}`)
-        } else {
-          out.push(`${command} ${args}`)
-        }
-      } else {
-        out.push(line)
-      }
-    }
-    return out.join('\n')
-  }
-
-  /**
-   * Resolve the path to an absolute path.
-   * @param inputPath {string} - The input path
-   * @param mfDir {string} - The directory of the modelfile
-   * @private @internal
-   */
-  private resolvePath(inputPath, mfDir) {
-    if (inputPath.startsWith('~')) {
-      return join(homedir(), inputPath.slice(1))
-    }
-    return resolve(mfDir, inputPath)
-  }
-
   /**
    * checks if a file exists
    * @param path {string} - The path to the file
@@ -84,60 +40,6 @@ export class Ollama extends OllamaBrowser {
     }
   }
 
-  private async createBlob(path: string): Promise<string> {
-    if (typeof ReadableStream === 'undefined') {
-      // Not all fetch implementations support streaming
-      // TODO: support non-streaming uploads
-      throw new Error('Streaming uploads are not supported in this environment.')
-    }
-
-    // Create a stream for reading the file
-    const fileStream = createReadStream(path)
-
-    // Compute the SHA256 digest
-    const sha256sum = await new Promise<string>((resolve, reject) => {
-      const hash = createHash('sha256')
-      fileStream.on('data', (data) => hash.update(data))
-      fileStream.on('end', () => resolve(hash.digest('hex')))
-      fileStream.on('error', reject)
-    })
-
-    const digest = `sha256:${sha256sum}`
-
-    try {
-      await utils.head(this.fetch, `${this.config.host}/api/blobs/${digest}`)
-    } catch (e) {
-      if (e instanceof Error && e.message.includes('404')) {
-        // Create a new readable stream for the fetch request
-        const readableStream = new ReadableStream({
-          start(controller) {
-            fileStream.on('data', (chunk) => {
-              controller.enqueue(chunk) // Enqueue the chunk directly
-            })
-
-            fileStream.on('end', () => {
-              controller.close() // Close the stream when the file ends
-            })
-
-            fileStream.on('error', (err) => {
-              controller.error(err) // Propagate errors to the stream
-            })
-          },
-        })
-
-        await utils.post(
-          this.fetch,
-          `${this.config.host}/api/blobs/${digest}`,
-          readableStream,
-        )
-      } else {
-        throw e
-      }
-    }
-
-    return digest
-  }
-
   create(
     request: CreateRequest & { stream: true },
   ): Promise<AbortableAsyncIterator<ProgressResponse>>
@@ -146,21 +48,12 @@ export class Ollama extends OllamaBrowser {
   async create(
     request: CreateRequest,
   ): Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>> {
-    let modelfileContent = ''
-    if (request.path) {
-      modelfileContent = await promises.readFile(request.path, { encoding: 'utf8' })
-      modelfileContent = await this.parseModelfile(
-        modelfileContent,
-        dirname(request.path),
-      )
-    } else if (request.modelfile) {
-      modelfileContent = await this.parseModelfile(request.modelfile)
-    } else {
-      throw new Error('Must provide either path or modelfile to create a model')
+    // fail if request.from is a local path
+    // TODO: https://github.com/ollama/ollama-js/issues/191
+    if (request.from && await this.fileExists(resolve(request.from))) {
+      throw Error('Creating with a local path is not currently supported from ollama-js')
     }
-    request.modelfile = modelfileContent
 
-    // check stream here so that typescript knows which overload to use
     if (request.stream) {
       return super.create(request as CreateRequest & { stream: true })
     } else {

+ 8 - 3
src/interfaces.ts

@@ -120,10 +120,15 @@ export interface PushRequest {
 
 export interface CreateRequest {
   model: string
-  path?: string
-  modelfile?: string
-  quantize?: string
+  from?: string
   stream?: boolean
+  quantize?: string
+  template?: string
+  license?: string | string[]
+  system?: string
+  parameters?: Record<string, unknown>
+  messages?: Message[]
+  adapters?: Record<string, string>
 }
 
 export interface DeleteRequest {