diff --git a/examples/ai-core/src/generate-object/ollama-request-body.ts b/examples/ai-core/src/generate-object/ollama-request-body.ts
new file mode 100755
index 0000000..7581fb1
--- /dev/null
+++ b/examples/ai-core/src/generate-object/ollama-request-body.ts
@@ -0,0 +1,31 @@
+#! /usr/bin/env -S pnpm tsx
+
+import { generateObject } from 'ai'
+import { ollama } from 'ollama-ai-provider'
+import { z } from 'zod'
+
+import { buildProgram } from '../tools/command'
+
+async function main(model: Parameters<typeof ollama>[0]) {
+  const { request } = await generateObject({
+    model: ollama(model),
+    prompt: 'Generate a lasagna recipe.',
+    schema: z.object({
+      recipe: z.object({
+        ingredients: z.array(
+          z.object({
+            amount: z.string(),
+            name: z.string(),
+          }),
+        ),
+        name: z.string(),
+        steps: z.array(z.string()),
+      }),
+    }),
+  })
+
+  console.log('REQUEST BODY')
+  console.log(request.body)
+}
+
+buildProgram('llama3.1', main).catch(console.error)
diff --git a/examples/ai-core/src/generate-text/ollama-active-tools.ts b/examples/ai-core/src/generate-text/ollama-active-tools.ts
new file mode 100755
index 0000000..2a690f4
--- /dev/null
+++ b/examples/ai-core/src/generate-text/ollama-active-tools.ts
@@ -0,0 +1,29 @@
+#! /usr/bin/env -S pnpm tsx
+
+import { generateText, tool } from 'ai'
+import { ollama } from 'ollama-ai-provider'
+import { z } from 'zod'
+
+import { buildProgram } from '../tools/command'
+import { weatherTool } from '../tools/weather-tool'
+
+async function main(model: Parameters<typeof ollama>[0]) {
+  const { text } = await generateText({
+    // disable all tools
+    experimental_activeTools: [],
+    maxSteps: 5,
+    model: ollama(model),
+    prompt:
+      'What is the weather in San Francisco and what attractions should I visit?',
+    tools: {
+      cityAttractions: tool({
+        parameters: z.object({ city: z.string() }),
+      }),
+      weather: weatherTool,
+    },
+  })
+
+  console.log(text)
+}
+
+buildProgram('llama3.1', main).catch(console.error)
diff --git a/examples/ai-core/src/generate-text/ollama-multimodal-base64.ts b/examples/ai-core/src/generate-text/ollama-image-base64.ts
similarity index 100%
rename from examples/ai-core/src/generate-text/ollama-multimodal-base64.ts
rename to examples/ai-core/src/generate-text/ollama-image-base64.ts
diff --git a/examples/ai-core/src/generate-text/ollama-multimodal-url.ts b/examples/ai-core/src/generate-text/ollama-image-url.ts
similarity index 100%
rename from examples/ai-core/src/generate-text/ollama-multimodal-url.ts
rename to examples/ai-core/src/generate-text/ollama-image-url.ts
diff --git a/examples/ai-core/src/generate-text/ollama-multimodal.ts b/examples/ai-core/src/generate-text/ollama-image.ts
similarity index 100%
rename from examples/ai-core/src/generate-text/ollama-multimodal.ts
rename to examples/ai-core/src/generate-text/ollama-image.ts
diff --git a/examples/ai-core/src/generate-text/ollama-request-body.ts b/examples/ai-core/src/generate-text/ollama-request-body.ts
new file mode 100755
index 0000000..76d5abd
--- /dev/null
+++ b/examples/ai-core/src/generate-text/ollama-request-body.ts
@@ -0,0 +1,18 @@
+#! /usr/bin/env -S pnpm tsx
+
+import { generateText } from 'ai'
+import { ollama } from 'ollama-ai-provider'
+
+import { buildProgram } from '../tools/command'
+
+async function main(model: Parameters<typeof ollama>[0]) {
+  const { request } = await generateText({
+    model: ollama(model),
+    prompt: 'Invent a new holiday and describe its traditions.',
+  })
+
+  console.log('REQUEST BODY')
+  console.log(request.body)
+}
+
+buildProgram('llama3.1', main).catch(console.error)
diff --git a/examples/ai-core/src/generate-text/ollama-timeout.ts b/examples/ai-core/src/generate-text/ollama-timeout.ts
new file mode 100755
index 0000000..c868eca
--- /dev/null
+++ b/examples/ai-core/src/generate-text/ollama-timeout.ts
@@ -0,0 +1,20 @@
+#! /usr/bin/env -S pnpm tsx
+
+import { generateText } from 'ai'
+import { ollama } from 'ollama-ai-provider'
+
+import { buildProgram } from '../tools/command'
+
+async function main(model: Parameters<typeof ollama>[0]) {
+  const { text, usage } = await generateText({
+    abortSignal: AbortSignal.timeout(1000),
+    model: ollama(model),
+    prompt: 'Invent a new holiday and describe its traditions.',
+  })
+
+  console.log(text)
+  console.log()
+  console.log('Usage:', usage)
+}
+
+buildProgram('llama3.1', main).catch(console.error)
diff --git a/examples/ai-core/src/generate-text/ollama-tool-call-choice.ts b/examples/ai-core/src/generate-text/ollama-tool-call-choice.ts
new file mode 100755
index 0000000..3ae8741
--- /dev/null
+++ b/examples/ai-core/src/generate-text/ollama-tool-call-choice.ts
@@ -0,0 +1,31 @@
+#! /usr/bin/env -S pnpm tsx
+
+import { generateText, tool } from 'ai'
+import { ollama } from 'ollama-ai-provider'
+import { z } from 'zod'
+
+import { buildProgram } from '../tools/command'
+import { weatherTool } from '../tools/weather-tool'
+
+async function main(model: Parameters<typeof ollama>[0]) {
+  const result = await generateText({
+    maxTokens: 512,
+    model: ollama(model),
+    prompt:
+      'What is the weather in San Francisco and what attractions should I visit?',
+    toolChoice: {
+      toolName: 'weather',
+      type: 'tool',
+    },
+    tools: {
+      cityAttractions: tool({
+        parameters: z.object({ city: z.string() }),
+      }),
+      weather: weatherTool,
+    },
+  })
+
+  console.log(JSON.stringify(result, null, 2))
+}
+
+buildProgram('llama3.1', main).catch(console.error)
diff --git a/examples/ai-core/src/stream-object/ollama-request-body.ts b/examples/ai-core/src/stream-object/ollama-request-body.ts
new file mode 100755
index 0000000..f3b7803
--- /dev/null
+++ b/examples/ai-core/src/stream-object/ollama-request-body.ts
@@ -0,0 +1,37 @@
+#! /usr/bin/env -S pnpm tsx
+
+import { streamObject } from 'ai'
+import { ollama } from 'ollama-ai-provider'
+import { z } from 'zod'
+
+import { buildProgram } from '../tools/command'
+
+async function main(model: Parameters<typeof ollama>[0]) {
+  const result = await streamObject({
+    maxTokens: 2000,
+    model: ollama(model),
+    prompt:
+      'Generate 3 character descriptions for a fantasy role playing game.',
+    schema: z.object({
+      characters: z.array(
+        z.object({
+          class: z
+            .string()
+            .describe('Character class, e.g. warrior, mage, or thief.'),
+          description: z.string(),
+          name: z.string(),
+        }),
+      ),
+    }),
+  })
+
+  // consume stream
+  for await (const part of result.partialObjectStream) {
+  }
+
+  console.log('REQUEST BODY')
+  // eslint-disable-next-line unicorn/no-await-expression-member
+  console.log((await result.request).body)
+}
+
+buildProgram('llama3.1', main).catch(console.error)
diff --git a/examples/ai-core/src/stream-text/ollama-request-body.ts b/examples/ai-core/src/stream-text/ollama-request-body.ts
new file mode 100755
index 0000000..3df9a05
--- /dev/null
+++ b/examples/ai-core/src/stream-text/ollama-request-body.ts
@@ -0,0 +1,26 @@
+#! /usr/bin/env -S pnpm tsx
+
+import { streamText } from 'ai'
+import { ollama } from 'ollama-ai-provider'
+
+import { buildProgram } from '../tools/command'
+
+async function main(model: Parameters<typeof ollama>[0]) {
+  const result = await streamText({
+    maxRetries: 5,
+    maxTokens: 512,
+    model: ollama(model),
+    prompt: 'Invent a new holiday and describe its traditions.',
+    temperature: 0.3,
+  })
+
+  // consume stream
+  for await (const textPart of result.textStream) {
+  }
+
+  console.log('REQUEST BODY')
+  // eslint-disable-next-line unicorn/no-await-expression-member
+  console.log((await result.request).body)
+}
+
+buildProgram('llama3.1', main).catch(console.error)