
browser: update webCrawl and webFetch shapes (#249)

nicole pardal 1 day ago
parent
commit cfb069eaf2
3 changed files with 44 additions and 73 deletions
  1. examples/websearch/websearch-tools.ts (+25, -42)
  2. src/browser.ts (+13, -14)
  3. src/interfaces.ts (+6, -17)

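In short, the web search and crawl/fetch APIs move from batched arrays to single values. A minimal sketch of the before/after call shapes follows; the client construction, the Authorization header format, and the environment variable are assumptions, not part of this diff:

import { Ollama } from 'ollama'

// Assumption: the web APIs are authenticated via request headers (browser.ts forwards config.headers).
const client = new Ollama({ headers: { Authorization: `Bearer ${process.env.OLLAMA_API_KEY}` } })

// Before this change (batched):
//   client.webSearch({ queries: ['ollama web search'], max_results: 5 })
//   client.webCrawl({ urls: ['https://ollama.com'] })

// After this change (single value per call):
async function demo() {
  const search = await client.webSearch({ query: 'ollama web search', max_results: 5 })
  const page = await client.webFetch({ url: 'https://ollama.com' })
  console.log(search.results.length, 'search results;', page.title)
}

demo().catch(console.error)
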
examples/websearch/websearch-tools.ts (+25, -42)

@@ -1,5 +1,4 @@
-import ollama, { Ollama } from 'ollama'
-import type { Message } from 'ollama'
+import { Ollama, type Message, type SearchResponse, type FetchResponse } from 'ollama'
 
 async function main() {
 
@@ -14,52 +13,46 @@ async function main() {
     type: 'function',
     function: {
       name: 'webSearch',
-      description: 'Performs a web search for the given queries.',
+      description: 'Performs a web search for the given query.',
       parameters: {
         type: 'object',
         properties: {
-          queries: {
-            type: 'array',
-            items: { type: 'string' },
-            description: 'An array of search queries.',
-          },
+          query: { type: 'string', description: 'Search query string.' },
           max_results: {
             type: 'number',
             description: 'The maximum number of results to return per query (default 5, max 10).',
           },
         },
-        required: ['queries'],
+        required: ['query'],
       },
     },
   }
 
-  const webCrawlTool = {
+  const webFetchTool = {
     type: 'function',
     function: {
-      name: 'webCrawl',
-      description: 'Performs a web crawl for the given URLs.',
+      name: 'webFetch',
+      description: 'Fetches a single page by URL.',
       parameters: {
         type: 'object',
         properties: {
-          urls: {
-            type: 'array',
-            items: { type: 'string' },
-            description: 'An array of URLs to crawl.',
-          },
+          url: { type: 'string', description: 'A single URL to fetch.' },
         },
-        required: ['urls'],
+        required: ['url'],
       },
     },
   }
 
-  const availableTools = {
-    webSearch: async (args: { queries: string[]; max_results?: number }) => {
-      return await client.webSearch(args)
-    },
-    webCrawl: async (args: { urls: string[] }) => {
-      return await client.webCrawl(args)
-    },
-  }
+  const availableTools = {
+    webSearch: async (args: { query: string; max_results?: number }): Promise<SearchResponse> => {
+      const res = await client.webSearch(args)
+      return res as SearchResponse
+    },
+    webFetch: async (args: { url: string }): Promise<FetchResponse> => {
+      const res = await client.webFetch(args)
+      return res as FetchResponse
+    },
+  }
 
   const messages: Message[] = [
     {
@@ -69,33 +62,23 @@ async function main() {
   ]
 
   console.log('----- Prompt:', messages.find((m) => m.role === 'user')?.content, '\n')
-
+  
   while (true) {
-    const response = await client.chat({
-      model: 'gpt-oss',
+    const response = await client.chat({
+      model: 'qwen3',
       messages: messages,
-      tools: [webSearchTool, webCrawlTool],
+      tools: [webSearchTool, webFetchTool],
       stream: true,
       think: true,
     })
 
     let hadToolCalls = false
-    let startedThinking = false
-    let finishedThinking = false
     var content = ''
     var thinking = ''
     for await (const chunk of response) {
-      if (chunk.message.thinking && !startedThinking) {
-        startedThinking = true
-        process.stdout.write('Thinking:\n========\n\n')
-      } else if (chunk.message.content && startedThinking && !finishedThinking) {
-        finishedThinking = true
-        process.stdout.write('\n\nResponse:\n========\n\n')
-      }
 
       if (chunk.message.thinking) {
         thinking += chunk.message.thinking
-        process.stdout.write(chunk.message.thinking)
       }
       if (chunk.message.content) {
         content += chunk.message.content
@@ -116,7 +99,7 @@ async function main() {
             const args = toolCall.function.arguments as any
             console.log('\nCalling function:', toolCall.function.name, 'with arguments:', args)
             const output = await functionToCall(args)
-            console.log('Function output:', JSON.stringify(output).slice(0, 200), '\n')
+            console.log('Function result:', JSON.stringify(output).slice(0, 200), '\n')
             
             messages.push(chunk.message)
             messages.push({
@@ -134,7 +117,7 @@ async function main() {
       break
     }
 
-    console.log('----- Sending result back to model \n')
+    
   }
 }
 

src/browser.ts (+13, -14)

@@ -26,8 +26,8 @@ import type {
   StatusResponse,
   SearchRequest,
   SearchResponse,
-  CrawlRequest,
-  CrawlResponse,
+  FetchRequest,
+  FetchResponse,
 } from './interfaces.js'
 import { defaultHost } from './constant.js'
 
@@ -327,13 +327,13 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
 
   /**
    * Performs web search using the Ollama web search API
-   * @param request {SearchRequest} - The search request containing queries and options
+   * @param request {SearchRequest} - The search request containing query and options
    * @returns {Promise<SearchResponse>} - The search results
    * @throws {Error} - If the request is invalid or the server returns an error
    */
   async webSearch(request: SearchRequest): Promise<SearchResponse> {
-    if (!request.queries || request.queries.length === 0) {
-      throw new Error('At least one query is required')
+    if (!request.query || request.query.length === 0) {
+      throw new Error('Query is required')
     }
 
     const response = await utils.post(this.fetch, `https://ollama.com/api/web_search`, { ...request }, {
@@ -343,20 +343,19 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
   }
 
   /**
-   * Performs web crawl using the Ollama web crawl API
-   * @param request {CrawlRequest} - The crawl request containing URLs and options
-   * @returns {Promise<CrawlResponse>} - The crawl results
+   * Fetches a single page using the Ollama web fetch API
+   * @param request {FetchRequest} - The fetch request containing a URL
+   * @returns {Promise<FetchResponse>} - The fetch result
    * @throws {Error} - If the request is invalid or the server returns an error
    */
-  async webCrawl(request: CrawlRequest): Promise<CrawlResponse> {
-    if (!request.urls || request.urls.length === 0) {
-      throw new Error('At least one URL is required')
+  async webFetch(request: FetchRequest): Promise<FetchResponse> {
+    if (!request.url || request.url.length === 0) {
+      throw new Error('URL is required')
     }
-
-    const response = await utils.post(this.fetch, `https://ollama.com/api/web_crawl`, { ...request }, {
+    const response = await utils.post(this.fetch, `https://ollama.com/api/web_fetch`, { ...request }, {
       headers: this.config.headers
     })
-    return (await response.json()) as CrawlResponse
+    return (await response.json()) as FetchResponse
   }
 }
 
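With the updated client surface above, webSearch can be exercised directly. A small sketch, assuming the API key is passed through the client's headers the same way browser.ts forwards config.headers (the header name and env var are assumptions):

import { Ollama, type SearchResponse } from 'ollama'

async function searchDemo(): Promise<void> {
  // Sketch only; the auth header format is an assumption.
  const client = new Ollama({ headers: { Authorization: `Bearer ${process.env.OLLAMA_API_KEY}` } })

  // webSearch now requires a non-empty `query` string and posts to https://ollama.com/api/web_search.
  const res: SearchResponse = await client.webSearch({ query: 'ollama structured outputs', max_results: 3 })
  for (const result of res.results) {
    console.log(result.content.slice(0, 120))
  }
}

searchDemo().catch(console.error)
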

src/interfaces.ts (+6, -17)

@@ -272,37 +272,26 @@ export interface StatusResponse {
 
 // Web Search types
 export interface SearchRequest {
-  queries: string[]
+  query: string
   max_results?: number
 }
 
-
 export interface SearchResult {
-  title: string
-  url: string
   content: string
 }
 
 export interface SearchResponse {
-  results: Record<string, SearchResult[]>
-  success: boolean
-  errors?: string[]
+  results: SearchResult[]
 }
 
-// Crawl types - commented out removed fields
-export interface CrawlRequest {
-  urls: string[]
+// Fetch types
+export interface FetchRequest {
+  url: string
 }
 
-export interface CrawlResult {
+export interface FetchResponse {
   title: string
   url: string
   content: string
   links: string[]
 }
-
-export interface CrawlResponse {
-  results: Record<string, CrawlResult[]>
-  success: boolean
-  errors?: string[]
-}
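
The new interfaces are small enough to consume directly. A sketch of reading a FetchResponse; the field names come from the interface above, while the client setup and auth header are assumptions:

import { Ollama, type FetchResponse } from 'ollama'

async function fetchDemo(url: string): Promise<void> {
  // Sketch only; API-key handling via headers is an assumption.
  const client = new Ollama({ headers: { Authorization: `Bearer ${process.env.OLLAMA_API_KEY}` } })

  const page: FetchResponse = await client.webFetch({ url })
  console.log(page.title, '-', page.url)
  console.log(page.content.slice(0, 200))
  console.log(`${page.links.length} links on the page`)
}

fetchDemo('https://ollama.com/blog').catch(console.error)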