From c6c1b892f020cc18f44f73344bc83d60c247617d Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 13:33:02 -0500 Subject: [PATCH 01/23] adding attachments --- src/Attachment.ts | 9 ++ src/LLM.ts | 34 ++++++- src/LLM.types.ts | 4 + src/index.ts | 7 ++ test/attachments.test.ts | 200 +++++++++++++++++++++++++++++++++++++++ test/currentService.ts | 4 +- test/vitest.config.ts | 2 +- vite.config.ts | 2 +- 8 files changed, 254 insertions(+), 8 deletions(-) create mode 100644 src/Attachment.ts create mode 100644 test/attachments.test.ts diff --git a/src/Attachment.ts b/src/Attachment.ts new file mode 100644 index 0000000..8e4de3f --- /dev/null +++ b/src/Attachment.ts @@ -0,0 +1,9 @@ +export default class Attachment { + public data: string; + public contentType: string; + + constructor(data: string, contentType: string) { + this.data = data; + this.contentType = contentType; + } +} \ No newline at end of file diff --git a/src/LLM.ts b/src/LLM.ts index b4c6e8e..cce9952 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -2,6 +2,7 @@ import logger from './logger'; const log = logger("llm.js:index"); import ModelUsage from "./ModelUsage"; +import Attachment from "./Attachment"; import type { ModelUsageType } from "./ModelUsage"; import config from "./config"; import * as parsers from "./parsers"; @@ -117,14 +118,21 @@ export default class LLM { } addMessage(role: MessageRole, content: MessageContent) { this.messages.push({ role, content }) } - user(content: string) { this.addMessage("user", content) } + user(content: string, attachments?: Attachment[]) { + if (attachments && attachments.length > 0) { + this.addMessage("user", { type: "text", text: content, attachments }); + } else { + this.addMessage("user", { type: "text", text: content }); + } + } assistant(content: string) { this.addMessage("assistant", content) } system(content: string) { this.addMessage("system", content) } thinking(content: string) { this.addMessage("thinking", content) } toolCall(tool: ToolCall) { 
this.addMessage("tool_call", tool) } async chat(input: string, options?: Options): Promise | Response | PartialStreamResponse> { - this.user(input); + const attachments = options?.attachments || []; + this.user(input, attachments); return await this.send(options); } @@ -135,6 +143,8 @@ export default class LLM { } async send(options?: Options): Promise | Response | PartialStreamResponse> { + delete options?.attachments; + const vanillaOptions = { ...this.llmOptions, ...options || {} }; const opts = this.parseOptions(JSON.parse(JSON.stringify(vanillaOptions))); @@ -409,8 +419,24 @@ export default class LLM { parseModel(model: any): Model { throw new Error("parseModel not implemented") } parseMessages(messages: Message[]): Message[] { return messages.map(message => { - if (message.role === "thinking" || message.role === "tool_call") message.role = "assistant"; - return message; + const messageCopy = JSON.parse(JSON.stringify(message)); + + if (messageCopy.role === "thinking" || messageCopy.role === "tool_call") messageCopy.role = "assistant"; + + if (messageCopy.content.attachments) { + const content = []; + for (const attachment of message.content.attachments) { + content.push({ type: "image", source: { type: "base64", media_type: attachment.contentType, data: attachment.data } }); + } + + content.push({ type: "text", text: message.content.text }); + + messageCopy.content = content; + } else if (typeof messageCopy.content !== "string") { + messageCopy.content = JSON.stringify(messageCopy.content); + } + + return messageCopy; }); } diff --git a/src/LLM.types.ts b/src/LLM.types.ts index 4347fba..5d956d5 100644 --- a/src/LLM.types.ts +++ b/src/LLM.types.ts @@ -1,5 +1,6 @@ import { type ModelUsageType } from "./ModelUsage"; import type { OpenAITool } from "./openai"; +import type Attachment from "./Attachment"; /** * @category Options @@ -104,6 +105,9 @@ export interface Options { /** Quality filter when dealing with model usage */ qualityFilter?: QualityFilter; + + /** 
Attachments to send to the model */ + attachments?: Attachment[]; } /** diff --git a/src/index.ts b/src/index.ts index 6e67158..88dd62e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -10,6 +10,9 @@ import DeepSeek from "./deepseek"; import LLM from "./LLM"; import APIv1 from "./APIv1"; +import Attachment from "./Attachment"; + + import ModelUsage from "./ModelUsage"; import * as parsers from "./parsers"; @@ -78,6 +81,8 @@ export interface LLMInterface { DeepSeek: typeof DeepSeek; APIv1: typeof APIv1; + Attachment: typeof Attachment; + register(LLMClass: typeof LLM): void; unregister(LLMClass: typeof LLM): void; } @@ -130,6 +135,8 @@ LLMWrapper.DeepSeek = DeepSeek; LLMWrapper.APIv1 = APIv1; LLMWrapper.LLM = LLM; +LLMWrapper.Attachment = Attachment; + LLMWrapper.register = (LLMClass: typeof LLM) => { SERVICES.push(LLMClass as any); }; diff --git a/test/attachments.test.ts b/test/attachments.test.ts new file mode 100644 index 0000000..4ea4b84 --- /dev/null +++ b/test/attachments.test.ts @@ -0,0 +1,200 @@ +import { describe, it, expect } from "vitest"; +import LLM from "../src/index.js"; +import type { Response } from "../src/LLM.types.js"; +import currentService from "./currentService.js"; + +const taco = 
`/9j/4QDKRXhpZgAATU0AKgAAAAgABgESAAMAAAABAAEAAAEaAAUAAAABAAAAVgEbAAUAAAABAAAAXgEoAAMAAAABAAIAAAITAAMAAAABAAEAAIdpAAQAAAABAAAAZgAAAAAAAABIAAAAAQAAAEgAAAABAAeQAAAHAAAABDAyMjGRAQAHAAAABAECAwCgAAAHAAAABDAxMDCgAQADAAAAAQABAACgAgAEAAAAAQAAAUKgAwAEAAAAAQAAARmkBgADAAAAAQAAAAAAAAAAAAD/4gIcSUNDX1BST0ZJTEUAAQEAAAIMYXBwbAQAAABtbnRyUkdCIFhZWiAH6QAGABYACwA6ADNhY3NwQVBQTAAAAABBUFBMAAAAAAAAAAAAAAAAAAAAAAAA9tYAAQAAAADTLWFwcGxSi53gmVFc+zG5zZ0lH9NFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAApkZXNjAAAA/AAAADRjcHJ0AAABMAAAAFB3dHB0AAABgAAAABRyWFlaAAABlAAAABRnWFlaAAABqAAAABRiWFlaAAABvAAAABRyVFJDAAAB0AAAABBjaGFkAAAB4AAAACxiVFJDAAAB0AAAABBnVFJDAAAB0AAAABBtbHVjAAAAAAAAAAEAAAAMZW5VUwAAABgAAAAcAEQARQBMAEwAIABVADIANQAxADUASABYbWx1YwAAAAAAAAABAAAADGVuVVMAAAA0AAAAHABDAG8AcAB5AHIAaQBnAGgAdAAgAEEAcABwAGwAZQAgAEkAbgBjAC4ALAAgADIAMAAyADVYWVogAAAAAAAA9tYAAQAAAADTLVhZWiAAAAAAAABvogAAOPUAAAOQWFlaIAAAAAAAAGKZAAC3hQAAGNpYWVogAAAAAAAAJKAAAA+EAAC2z3BhcmEAAAAAAAAAAAAB9gRzZjMyAAAAAAABDEIAAAXe///zJgAAB5MAAP2Q///7ov///aMAAAPcAADAbv/bAIQAAQEBAQEBAgEBAgMCAgIDBAMDAwMEBQQEBAQEBQYFBQUFBQUGBgYGBgYGBgcHBwcHBwgICAgICQkJCQkJCQkJCQEBAQECAgIEAgIECQYFBgkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJ/90ABAAH/8AAEQgAVwBkAwEiAAIRAQMRAf/EAaIAAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKCxAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6AQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgsRAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEHYXETIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX29/j5+v/aAAwDAQACEQMRAD8A/vAooor0DnCiigDPAoAKK47x98QvAnwq8OyeLfibrNl4e0uHhrrUZ0togeyhpCMsegVck9AK/LL44/8ABVVdP16x8B/so+A9R8daxPNDLcXurpLoei29hvxLK1xOhuiXAKwFLVkZueVHPyXFnHuSZDR9vnOKhRja/vyUdF2W7t5I6sPga9bSjBy2Wib32Wh+vvSk3p6ivwq8V/tQ/td
eNpJRdeMrLwrbOfkg8MabF5qKf4WvtU+1tJj+8ltB9B0Hmcnjv9oJGaez+LHjOKZgMP8AbbOQDGMYjksWi7f3MHvX8t5j9PLgSjW9lh/a1Y/zRppL7qkoS/8AJT9gwPgFxDWhzyjGHk5a/gmj+iOivwe8J/td/tm/Dt1MuvaR8QrNB81rr9mumXj/AO7qOloIl/4Hp7/UV+iH7Of7cXwu+PusL8P9Ts7vwZ428t5P7A1jy/MuI4yQ02n3MLPbX0QA3HyX82NSvnRRE4r9q8OPpAcKcVS9jlOKXtf5JLll8k9/+3bnxnE3h1nGULnxtG0e61X3rb52PtGikHtS1+znxAUUUUAf/9D+8Ciij6V6BzjWZUUu5AAGSTwAB/IAflX5FftSftrf8LR8N6z8D/2abmSNddtrjS28XxSNGsZmRo3bRVT57mWPnF2SlrGfmRpypUeP/wDBQj9sLRPEuqar8GLLUfsfhDRZGttZJyi6zdR/6y08wH5tPtmBjuI1/wCPqcNC37iKVJfyk0yaD9p/UdQ/4Q7xLcaVc6FHcwi/iimNvBPPboFSUR7cq8UnlgAqNrMFHHH+fX0jPpT4zCYyrw9wxP2Sp+7WxTjzxpvbkpx+1JOyk9o6pJy2/RfDnD8LVM1w2D4hxLXtW48tNc3JZfHWs17OC0v9q3vJWQ79l34ReLvBviuGHxdqSatqOmQzWuuwfaXubjw3eMiXFtFGbuSdwby3dS8luy/MMPu6j9B1WNIRDANqD5sHkk/3mPdveuK+F/wu8P8Awm8HW3hLRkDzrHEb+9bLT311HEkLXNxI2WkkZUVck8KoAAAAHflSK/xx4w4vlnmcTzavNzeiTfZdbdPJbqNk25czf9j+GPB1PIspp4Gnolra97X8+/V9OZvlUY2iqDgjG2mEYq6yk1XdX3AY+WjC44/RYysVWTJwK5Txh4O8O+O9Cfw74ngMtuzLJG0UkkE8Eyf6ue2uIWSa2uIj80U8LpLGwBRgRXWo6yZ29AcUMpr6nLc3q0Ksa1GTjKLTTWjTWzVtrBVpwqQcKiunpbofoB+xJ+1f4r8T69/wzj8dbk3viO2tWutE15xHGNcs4f8AWxzIm1U1K0XabgKqx3ERFxEFxPFD+mlfzJ+PbvWvDmiD4g+FCU1zwjMmv6XIv3hdadmUID/dniEltIP4opXXoa/pT8O69p3irw9YeKNIObTU7aG7g/65zxrIn/jrCv8Ab76JPjXX4wyCcMwd8Rh2oyf8ya9yXq7NPzV+tl/BvjNwJSyXMVLCq1Korpdmt16bW9bdDZooor+rD8gP/9H+8Cvl/wDbA+Ll78H/AIH3+peH7kWmuazImj6TKesVzdBt04HGfssCy3GP+meK+oK/ED/grhd6n4u1zwx8NdP1W40mGw0y71KdrURl3e/mSxjX5wdo8mO4GVAOGIBxmvzTx344fDfCGPziE+SUIWjK1+WU2oQdktbSknbyPV4ZyueMx9LD06fO2/huldLVq7sldK1+h+SPw8+EnwiubrV7mxkutWv9LuY10b+2pzLYwG2XfHFLNK5luwXYm5UNk7OeSxPcfDO21j4dftKXXgbwRYHTfD/jVLzxLqsMg3PHc2wtokW1mibYLNiSqQuDlSSoQhs/mB8a9JvvgxrGn6BrHiaS60W+DSQLJEEktXB2v/q/3YVh/FgHrwa+2v8Agnd4l1Hx74r8U6vf6k+pw+H9NsdKtWk5aMTSyzuhYKAflSPA7DHA7/4u8Y5bThwbic+df2qcOX327vm5YKSi1upNS9ddbu/7z4D+CGf4mviOKq1bBUcPha0ovDxm/b3cWly8sXzpe1V1KaTULcqjDlP1D4bO3txUYiCuzdc1dCoV3R8A/hUKtHMokiIYdiOlfwthsdbY/rBSuUmhXzPM9sYprABT2FTTeTE4mmIHRRn3p7ID1r6HDY7Y0UrGXAwmhEyDaD0FMeImUODgAdKvSIXBjXr04/8ArVz/AIbl8XfFTxfcfDT
4DaM3i3XbJvKvWSTyNM0ttu4f2nqG147dsEYgRZbpgQVgK/MP0ngjhvNc+x0cvybDyq1ZbRir6d30SXVuyRwZrneEwNB4jGVFCK7/AKf5I5n4j+JtE8IeANZ8R6/KILS3tZVYn+JpFKRxoOrPI5CRouWdiFUE4Ff0B/s46FrXhb9njwF4Y8SQPbajp3hvSbW6hk+/HNDZxJIjf7SsCD7ivm34CfsGeBPh3rmnfE/4uzx+M/GmnHzrOeWHy9N0qVl2ltMsmLhJNuV+1zGS5ILBXjQ+WPviv9w/olfR8xvA2X4irmlROviOTmjH4YKHNZX6v3ne2miSutT+JfF7xGo59iKccLC1Oneze7vbp0Wmn6bBRRRX9cn48f/S/vAr8D/+Cnt3FZ/tBNNcNsjj8K6U7E/dCi/1PJ/Cv3wr8Kv+CyHgrUpJ/DvinSULf23oOtaFtXAaW8tPK1WzjBJCgtDFegZxX8+fS7ySWYeHmYYaP/TtvyjGrTlJ/KKb+R9PwHmM8JmcK9JXklKyXV8klFL1dkfzgftPfF34MHwLq2rJIl1rTJFb26XLvEhVu4QAnaBnIVQ7dsdR9Jf8EZvF/wAHfEv7PXiO1+GJvXu9K1v7NrD3ibN1yYVkjMa9l8pgCMcGvxD+OFro2oeJ7yX4mafqFrFpt1ElzZ3NvsZEMeYwXz5bQyP8g2sc/TNfuT/wS4l+GWgR638N/h2rw/8AEp03U9QszCqQ2l60k0UkKOoAf5drE8kdyeK/y58eMkw+F8OcTChzS1pyvdcqipw276NWtZJP0P036LXh9iMlwTxOLVqtV3lzX532W9kr3eqve6tofreIkQYQcdcVWit44FKR5wSTz71pYRxujOR7dK5bxB4i0fwjp1z4i8X30Gm6bCyJ51w6om5yFVc9S7tgIgBZjgKCeK/zHyqpWrzVGirydkkt29kkluz+wJVIxjzSdkjRuI4so0gztPy49fwrk/EXjPSPD2qWPhtY7nU9c1XjT9G02B7zUrzBVSYLWLLmNSw3zNshiHMkiLzXvHwp/Zn+P37QaRaqYZ/ht4Tlwy6jqVqra7dxkZDWWmzgpZKc8TX6mTj/AI9OjV+qfwN/Zs+Dv7O2l3Nn8MdJEF5qOG1HVLqRrrU7916Nd3ku6aXH8KZEaD5URVAFf6g/R9/Z68QZ5GnmHF0nhMP/ACf8vpL02p/9vXl/c6n4Xxp47YPBXw+WL2k+/wBlf/JfLTzPzU+En/BPj4rfFaVfEv7T2oSeEdAnIf8A4RLQ7of2hcLg/u9V1iAjyl5G6204rgjDXcqMVr9cfAPw98CfCvwhY/D/AOGmj2WgaHpkYitLDT4Et7aFB2SOMBR7nqepJNdhRX+x/hz4WZDwngVgMhw0aUNL2+KVuspPVv1emystD+VOIOJsdmlb2+OqOT6dl6LZBRRRX6CeEFFFFAH/0/7wK+HP+Cinww1/4kfsra5qfgnTpNW8R+DZIfFOk2VuFM93NpRMs9lDvKr5l7Zm4tVyQMyjJAr7joBIII4xU5zlGHzDB1cBi43p1IuEl3jJWa+7QWFxE6NSNWm7OLTXy2P85r9vm7+D/i0+EtOtNWh1F9Xtje6MpVpLK+0y+RXtbmSSPHyMNvljfgksMHBr7A/4JB/BNvBFlr3iaK+t4YxbR6auk2dsI41PmCVrrzCS7PK+5SCByDjNfoz+3z/wRG8XeNvHF3f/AAM0NPFfgLWZ7y6/se31z/hH9f8ADs2oSG4vbfSLuaGa1utKu5yZhZXDwNZSM/2eQwskMXrv7JX/AARa8XeEfh5a/DL4qa5L4J8Ds7TajoHh/V7rUdf1piCmNV8USLBLDF5e1Tb6bFEyhQi3Xl5U/wCXHE30TuK6+RT4EwK/dydvbTcPZKHPzc6UXzqfL7vs+S6k37/s7N/1HgfF/K4w+u1V79tldO/bs153WnS5Q8I+IfGnxz8aah8KP2U9Ih8XaxolxHaa1qlxM9v4f0R25eO+v0STzrxI/m/
s+0ElxynnfZo5Flr9Wv2d/wBhfwF8HtVtPiT8Rr5/Hnj6BPl1m/iWK2sWIO5dI08F4bBDuI3gyXTrgS3D4GPrD4d/Dj4f/CLwPpnwz+FeiWPhzw7osC21hpmmwJbWltCgwEihjCqo+g5712lf1Z9Hn6HfCHh3RjVwNL22LtrXmk5f9uLamvKOttHKR+LcY+JWZZy3CrLlp9ILb59/y7JB70UUV/Vp+fBRRRQAUUUUAFFFFAH/1P7wKKKK9A5wooooAKKKKACiiigAooooAKKKKACiiigD/9k=`; + +// read from file & attach + +// document in readme +// pdf / remote vs local +// image + +/* + + { + "type": "image", + "source": { + "type": "base64", + "media_type": "image/jpeg", + "data": "/9j/4AAQSkZJRg...", + } + +{ + "type": "document", + "source": { + "type": "url", + "url": "https://assets.anthropic.com/m/1cd9d098ac3e6467/original/Claude-3-Model-Card-October-Addendum.pdf" + } + }, + + + + "type": "document", + "source": { + "type": "base64", + "media_type": "application/pdf", + "data": $PDF_BASE64 + } + +*/ + +// llm shorthand +// llm instance +// stream +// message history +// image from file +// image from url +// pdf from file +// pdf from url + +describe("image", function () { + LLM.services.forEach(s => { + const service = s.service; + + let max_tokens = 200; + if (currentService && service !== currentService) return; + if (service === "google") max_tokens = 5048; // google returns no response if max_tokens is hit! 
+ + it.only(`${service} base64 image instance`, async function () { + const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); + expect(tacoAttachment.data).toBe(taco); + expect(tacoAttachment.contentType).toBe("image/jpeg"); + + const llm = new LLM({ service, max_tokens: max_tokens }); + const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as string; + expect(response).toBeDefined(); + expect(response.length).toBeGreaterThan(0); + expect(response.toLowerCase()).toContain("taco"); + expect(llm.messages.length).toBe(2); + expect(llm.messages[0].content.attachments).toBeDefined(); + expect(llm.messages[0].content.attachments.length).toBe(1); + + const response2 = await llm.chat("what is the color of the shell?") as string; + expect(response2).toBeDefined(); + expect(response2.length).toBeGreaterThan(0); + expect(response2.toLowerCase()).toContain("yellow"); + expect(llm.messages.length).toBe(4); + }); + + /* + it(`${service} instance`, async function () { + const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service }); + const response = await llm.send(); + expect(response).toBeDefined(); + expect(llm.messages.length).toBe(2); + expect(llm.messages[0].role).toBe("user"); + expect(llm.messages[0].content).toBe("in one word the color of the sky is usually"); + expect(llm.messages[1].role).toBe("assistant"); + expect(llm.messages[1].content.toLowerCase()).toContain("blue"); + }); + + it(`${service} instance chat`, async function () { + const llm = new LLM({ max_tokens, service }); + const response = await llm.chat("in one word the color of the sky is usually"); + expect(response).toBeDefined(); + expect(llm.messages.length).toBe(2); + expect(llm.messages[0].role).toBe("user"); + expect(llm.messages[0].content).toBe("in one word the color of the sky is usually"); + expect(llm.messages[1].role).toBe("assistant"); + 
expect(llm.messages[1].content.toLowerCase()).toContain("blue"); + }); + + it(`${service} settings override`, async function () { + const llm = new LLM({ service }); + const response = await llm.chat("the color of the sky is usually", { max_tokens: max_tokens }); + expect(response).toBeDefined(); + expect(llm.messages.length).toBe(2); + expect(llm.messages[0].role).toBe("user"); + expect(llm.messages[0].content).toBe("the color of the sky is usually"); + expect(llm.messages[1].role).toBe("assistant"); + expect(llm.messages[1].content.toLowerCase()).toContain("blue"); + expect(llm.messages[1].content.length).toBeGreaterThan(3); + }); + + + it(`${service} extended`, async function () { + const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service, extended: true }); + const response = await llm.send() as Response; + expect(response).toBeDefined(); + expect(response).toBeInstanceOf(Object); + expect(response.service).toBe(service); + expect(response.content).toBeDefined(); + expect(response.content.length).toBeGreaterThan(0); + expect(response.content.toLowerCase()).toContain("blue"); + expect(response.options).toBeDefined(); + expect(response.options.max_tokens).toBe(max_tokens); + expect(response.messages.length).toBe(2); + expect(response.messages[0].role).toBe("user"); + expect(response.messages[0].content).toBe("in one word the color of the sky is usually"); + expect(response.messages[1].role).toBe("assistant"); + expect(response.messages[1].content.toLowerCase()).toContain("blue"); + expect(response.usage.input_tokens).toBeGreaterThan(0); + expect(response.usage.output_tokens).toBeGreaterThan(0); + expect(response.usage.total_tokens).toBe(response.usage.input_tokens + response.usage.output_tokens); + expect(response.usage.local).toBe(llm.isLocal); + if (llm.isLocal) { + expect(response.usage.input_cost).toBe(0); + expect(response.usage.output_cost).toBe(0); + expect(response.usage.total_cost).toBe(0); + } else { + 
expect(response.usage.input_cost).toBeGreaterThan(0); + expect(response.usage.output_cost).toBeGreaterThan(0); + expect(response.usage.total_cost).toBeGreaterThan(0); + } + }); + + it(`${service} abort`, async function () { + const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service }); + return new Promise((resolve, reject) => { + llm.send().then(() => { + resolve(false); + }).catch((e: any) => { + expect(e.name).toBe("AbortError"); + resolve(true); + }); + + setTimeout(() => { llm.abort() }, 50); + }); + }); + + it(`${service} temperature`, async function () { + const response = await LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service, temperature: 1, extended: true }) as unknown as Response; + expect(response).toBeDefined(); + expect(response.content).toBeDefined(); + expect(response.content.length).toBeGreaterThan(0); + expect(response.content.toLowerCase()).toContain("blue"); + expect(response.options).toBeDefined(); + expect(response.options.temperature).toBe(1); + }); + + it(`${service} temperature override`, async function () { + const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service, temperature: 0, extended: true }); + const response = await llm.send({ temperature: 1 }) as unknown as Response; + expect(response).toBeDefined(); + expect(response.content).toBeDefined(); + expect(response.content.length).toBeGreaterThan(0); + expect(response.content.toLowerCase()).toContain("blue"); + expect(response.options).toBeDefined(); + expect(response.options.temperature).toBe(1); + }); + }); + + it(`anthropic max_thinking_tokens`, async function () { + if (currentService && (currentService as string) !== "anthropic") return; + const service = "anthropic"; + const options = { max_tokens: 5048, max_thinking_tokens: 1025, service, think: true, model: "claude-opus-4-20250514" } as any; + const response = await LLM("in one word the color of the sky is 
usually", options) as unknown as Response; + expect(response).toBeDefined(); + expect(response.options.think).toBe(true); + expect(response.options.max_thinking_tokens).toBe(1025); + expect(response.thinking).toBeDefined(); + expect(response.thinking!.length).toBeGreaterThan(0); + expect(response.thinking!.toLowerCase()).toContain("blue"); + expect(response.content.length).toBeGreaterThan(0); + expect(response.content.toLowerCase()).toContain("blue"); + }); + */ + }); +}); \ No newline at end of file diff --git a/test/currentService.ts b/test/currentService.ts index da95627..a70c2ab 100644 --- a/test/currentService.ts +++ b/test/currentService.ts @@ -1,3 +1,3 @@ // helpful for testing a single service -// export default "google"; -export default undefined; \ No newline at end of file +export default "anthropic"; +// export default undefined; \ No newline at end of file diff --git a/test/vitest.config.ts b/test/vitest.config.ts index 8a529da..94719ad 100644 --- a/test/vitest.config.ts +++ b/test/vitest.config.ts @@ -6,7 +6,7 @@ export default defineConfig({ slowTestThreshold: 15000, setupFiles: ["./test/setup.ts"], bail: 1, - retry: 5, + retry: 0, reporters: ["dot"], printConsoleTrace: true, }, diff --git a/vite.config.ts b/vite.config.ts index 1f7d843..d3df796 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -62,7 +62,7 @@ export default defineConfig({ name: 'browser', setupFiles: ["./test/setup.ts"], bail: 1, - retry: 5, + retry: 0, printConsoleTrace: true, browser: { enabled: true, From 37ddad82d31ae49b8a42671ef1a8c7431af5dca1 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 13:42:18 -0500 Subject: [PATCH 02/23] working on attachments --- src/LLM.ts | 2 +- test/attachments.test.ts | 58 ++++++++++++++++++++++++++++++++------- test/taco.jpg | Bin 0 -> 4525 bytes 3 files changed, 49 insertions(+), 11 deletions(-) create mode 100644 test/taco.jpg diff --git a/src/LLM.ts b/src/LLM.ts index cce9952..97cc613 100644 --- a/src/LLM.ts +++ b/src/LLM.ts 
@@ -48,7 +48,7 @@ export default class LLM { this.service = options.service ?? (this.constructor as typeof LLM).service; this.messages = []; - if (input && typeof input === "string") this.user(input); + if (input && typeof input === "string") this.user(input, options.attachments); else if (input && Array.isArray(input)) this.messages = input; this.options = options; this.model = options.model ?? LLM.DEFAULT_MODEL; diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 4ea4b84..f3e78d7 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -1,11 +1,11 @@ import { describe, it, expect } from "vitest"; +import { readFileSync } from "fs"; + import LLM from "../src/index.js"; import type { Response } from "../src/LLM.types.js"; import currentService from "./currentService.js"; -const taco = `/9j/4QDKRXhpZgAATU0AKgAAAAgABgESAAMAAAABAAEAAAEaAAUAAAABAAAAVgEbAAUAAAABAAAAXgEoAAMAAAABAAIAAAITAAMAAAABAAEAAIdpAAQAAAABAAAAZgAAAAAAAABIAAAAAQAAAEgAAAABAAeQAAAHAAAABDAyMjGRAQAHAAAABAECAwCgAAAHAAAABDAxMDCgAQADAAAAAQABAACgAgAEAAAAAQAAAUKgAwAEAAAAAQAAARmkBgADAAAAAQAAAAAAAAAAAAD/4gIcSUNDX1BST0ZJTEUAAQEAAAIMYXBwbAQAAABtbnRyUkdCIFhZWiAH6QAGABYACwA6ADNhY3NwQVBQTAAAAABBUFBMAAAAAAAAAAAAAAAAAAAAAAAA9tYAAQAAAADTLWFwcGxSi53gmVFc+zG5zZ0lH9NFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAApkZXNjAAAA/AAAADRjcHJ0AAABMAAAAFB3dHB0AAABgAAAABRyWFlaAAABlAAAABRnWFlaAAABqAAAABRiWFlaAAABvAAAABRyVFJDAAAB0AAAABBjaGFkAAAB4AAAACxiVFJDAAAB0AAAABBnVFJDAAAB0AAAABBtbHVjAAAAAAAAAAEAAAAMZW5VUwAAABgAAAAcAEQARQBMAEwAIABVADIANQAxADUASABYbWx1YwAAAAAAAAABAAAADGVuVVMAAAA0AAAAHABDAG8AcAB5AHIAaQBnAGgAdAAgAEEAcABwAGwAZQAgAEkAbgBjAC4ALAAgADIAMAAyADVYWVogAAAAAAAA9tYAAQAAAADTLVhZWiAAAAAAAABvogAAOPUAAAOQWFlaIAAAAAAAAGKZAAC3hQAAGNpYWVogAAAAAAAAJKAAAA+EAAC2z3BhcmEAAAAAAAAAAAAB9gRzZjMyAAAAAAABDEIAAAXe///zJgAAB5MAAP2Q///7ov///aMAAAPcAADAbv/bAIQAAQEBAQEBAgEBAgMCAgIDBAMDAwMEBQQEBAQEBQYFBQUFBQUGBgYGBgYGBgcHBwcHBwgICAgICQkJCQkJCQkJCQEBAQECAgIEAgIECQYFBgkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJCQkJC
QkJCQkJ/90ABAAH/8AAEQgAVwBkAwEiAAIRAQMRAf/EAaIAAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKCxAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6AQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgsRAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEHYXETIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX29/j5+v/aAAwDAQACEQMRAD8A/vAooor0DnCiigDPAoAKK47x98QvAnwq8OyeLfibrNl4e0uHhrrUZ0togeyhpCMsegVck9AK/LL44/8ABVVdP16x8B/so+A9R8daxPNDLcXurpLoei29hvxLK1xOhuiXAKwFLVkZueVHPyXFnHuSZDR9vnOKhRja/vyUdF2W7t5I6sPga9bSjBy2Wib32Wh+vvSk3p6ivwq8V/tQ/tdeNpJRdeMrLwrbOfkg8MabF5qKf4WvtU+1tJj+8ltB9B0Hmcnjv9oJGaez+LHjOKZgMP8AbbOQDGMYjksWi7f3MHvX8t5j9PLgSjW9lh/a1Y/zRppL7qkoS/8AJT9gwPgFxDWhzyjGHk5a/gmj+iOivwe8J/td/tm/Dt1MuvaR8QrNB81rr9mumXj/AO7qOloIl/4Hp7/UV+iH7Of7cXwu+PusL8P9Ts7vwZ428t5P7A1jy/MuI4yQ02n3MLPbX0QA3HyX82NSvnRRE4r9q8OPpAcKcVS9jlOKXtf5JLll8k9/+3bnxnE3h1nGULnxtG0e61X3rb52PtGikHtS1+znxAUUUUAf/9D+8Ciij6V6BzjWZUUu5AAGSTwAB/IAflX5FftSftrf8LR8N6z8D/2abmSNddtrjS28XxSNGsZmRo3bRVT57mWPnF2SlrGfmRpypUeP/wDBQj9sLRPEuqar8GLLUfsfhDRZGttZJyi6zdR/6y08wH5tPtmBjuI1/wCPqcNC37iKVJfyk0yaD9p/UdQ/4Q7xLcaVc6FHcwi/iimNvBPPboFSUR7cq8UnlgAqNrMFHHH+fX0jPpT4zCYyrw9wxP2Sp+7WxTjzxpvbkpx+1JOyk9o6pJy2/RfDnD8LVM1w2D4hxLXtW48tNc3JZfHWs17OC0v9q3vJWQ79l34ReLvBviuGHxdqSatqOmQzWuuwfaXubjw3eMiXFtFGbuSdwby3dS8luy/MMPu6j9B1WNIRDANqD5sHkk/3mPdveuK+F/wu8P8Awm8HW3hLRkDzrHEb+9bLT311HEkLXNxI2WkkZUVck8KoAAAAHflSK/xx4w4vlnmcTzavNzeiTfZdbdPJbqNk25czf9j+GPB1PIspp4Gnolra97X8+/V9OZvlUY2iqDgjG2mEYq6yk1XdX3AY+WjC44/RYysVWTJwK5Txh4O8O+O9Cfw74ngMtuzLJG0UkkE8Eyf6ue2uIWSa2uIj80U8LpLGwBRgRXWo6yZ29AcUMpr6nLc3q0Ksa1GTjKLTTWjTWzVtrBVpwqQcKiunpbofoB+xJ+1f4r8T69/wzj8dbk3viO2tWutE15xHGNcs4f8AWxzIm1U1K0XabgKqx3ERFxEFxPFD+mlfzJ+PbvWvDmiD4g+FCU1zwjMmv6XIv3hdadmUI
D/dniEltIP4opXXoa/pT8O69p3irw9YeKNIObTU7aG7g/65zxrIn/jrCv8Ab76JPjXX4wyCcMwd8Rh2oyf8ya9yXq7NPzV+tl/BvjNwJSyXMVLCq1Korpdmt16bW9bdDZooor+rD8gP/9H+8Cvl/wDbA+Ll78H/AIH3+peH7kWmuazImj6TKesVzdBt04HGfssCy3GP+meK+oK/ED/grhd6n4u1zwx8NdP1W40mGw0y71KdrURl3e/mSxjX5wdo8mO4GVAOGIBxmvzTx344fDfCGPziE+SUIWjK1+WU2oQdktbSknbyPV4ZyueMx9LD06fO2/huldLVq7sldK1+h+SPw8+EnwiubrV7mxkutWv9LuY10b+2pzLYwG2XfHFLNK5luwXYm5UNk7OeSxPcfDO21j4dftKXXgbwRYHTfD/jVLzxLqsMg3PHc2wtokW1mibYLNiSqQuDlSSoQhs/mB8a9JvvgxrGn6BrHiaS60W+DSQLJEEktXB2v/q/3YVh/FgHrwa+2v8Agnd4l1Hx74r8U6vf6k+pw+H9NsdKtWk5aMTSyzuhYKAflSPA7DHA7/4u8Y5bThwbic+df2qcOX327vm5YKSi1upNS9ddbu/7z4D+CGf4mviOKq1bBUcPha0ovDxm/b3cWly8sXzpe1V1KaTULcqjDlP1D4bO3txUYiCuzdc1dCoV3R8A/hUKtHMokiIYdiOlfwthsdbY/rBSuUmhXzPM9sYprABT2FTTeTE4mmIHRRn3p7ID1r6HDY7Y0UrGXAwmhEyDaD0FMeImUODgAdKvSIXBjXr04/8ArVz/AIbl8XfFTxfcfDT4DaM3i3XbJvKvWSTyNM0ttu4f2nqG147dsEYgRZbpgQVgK/MP0ngjhvNc+x0cvybDyq1ZbRir6d30SXVuyRwZrneEwNB4jGVFCK7/AKf5I5n4j+JtE8IeANZ8R6/KILS3tZVYn+JpFKRxoOrPI5CRouWdiFUE4Ff0B/s46FrXhb9njwF4Y8SQPbajp3hvSbW6hk+/HNDZxJIjf7SsCD7ivm34CfsGeBPh3rmnfE/4uzx+M/GmnHzrOeWHy9N0qVl2ltMsmLhJNuV+1zGS5ILBXjQ+WPviv9w/olfR8xvA2X4irmlROviOTmjH4YKHNZX6v3ne2miSutT+JfF7xGo59iKccLC1Oneze7vbp0Wmn6bBRRRX9cn48f/S/vAr8D/+Cnt3FZ/tBNNcNsjj8K6U7E/dCi/1PJ/Cv3wr8Kv+CyHgrUpJ/DvinSULf23oOtaFtXAaW8tPK1WzjBJCgtDFegZxX8+fS7ySWYeHmYYaP/TtvyjGrTlJ/KKb+R9PwHmM8JmcK9JXklKyXV8klFL1dkfzgftPfF34MHwLq2rJIl1rTJFb26XLvEhVu4QAnaBnIVQ7dsdR9Jf8EZvF/wAHfEv7PXiO1+GJvXu9K1v7NrD3ibN1yYVkjMa9l8pgCMcGvxD+OFro2oeJ7yX4mafqFrFpt1ElzZ3NvsZEMeYwXz5bQyP8g2sc/TNfuT/wS4l+GWgR638N/h2rw/8AEp03U9QszCqQ2l60k0UkKOoAf5drE8kdyeK/y58eMkw+F8OcTChzS1pyvdcqipw276NWtZJP0P036LXh9iMlwTxOLVqtV3lzX532W9kr3eqve6tofreIkQYQcdcVWit44FKR5wSTz71pYRxujOR7dK5bxB4i0fwjp1z4i8X30Gm6bCyJ51w6om5yFVc9S7tgIgBZjgKCeK/zHyqpWrzVGirydkkt29kkluz+wJVIxjzSdkjRuI4so0gztPy49fwrk/EXjPSPD2qWPhtY7nU9c1XjT9G02B7zUrzBVSYLWLLmNSw3zNshiHMkiLzXvHwp/Zn+P37QaRaqYZ/ht4Tlwy6jqVqra7dxkZDWWmzgpZKc8TX6mTj/AI9OjV+qfwN/Zs+Dv7O2l3Nn8MdJEF5qOG1HVLqRrrU7916Nd3ku6aXH8KZEaD5URVAFf6g/R9/Z6
8QZ5GnmHF0nhMP/ACf8vpL02p/9vXl/c6n4Xxp47YPBXw+WL2k+/wBlf/JfLTzPzU+En/BPj4rfFaVfEv7T2oSeEdAnIf8A4RLQ7of2hcLg/u9V1iAjyl5G6204rgjDXcqMVr9cfAPw98CfCvwhY/D/AOGmj2WgaHpkYitLDT4Et7aFB2SOMBR7nqepJNdhRX+x/hz4WZDwngVgMhw0aUNL2+KVuspPVv1emystD+VOIOJsdmlb2+OqOT6dl6LZBRRRX6CeEFFFFAH/0/7wK+HP+Cinww1/4kfsra5qfgnTpNW8R+DZIfFOk2VuFM93NpRMs9lDvKr5l7Zm4tVyQMyjJAr7joBIII4xU5zlGHzDB1cBi43p1IuEl3jJWa+7QWFxE6NSNWm7OLTXy2P85r9vm7+D/i0+EtOtNWh1F9Xtje6MpVpLK+0y+RXtbmSSPHyMNvljfgksMHBr7A/4JB/BNvBFlr3iaK+t4YxbR6auk2dsI41PmCVrrzCS7PK+5SCByDjNfoz+3z/wRG8XeNvHF3f/AAM0NPFfgLWZ7y6/se31z/hH9f8ADs2oSG4vbfSLuaGa1utKu5yZhZXDwNZSM/2eQwskMXrv7JX/AARa8XeEfh5a/DL4qa5L4J8Ds7TajoHh/V7rUdf1piCmNV8USLBLDF5e1Tb6bFEyhQi3Xl5U/wCXHE30TuK6+RT4EwK/dydvbTcPZKHPzc6UXzqfL7vs+S6k37/s7N/1HgfF/K4w+u1V79tldO/bs153WnS5Q8I+IfGnxz8aah8KP2U9Ih8XaxolxHaa1qlxM9v4f0R25eO+v0STzrxI/m/s+0ElxynnfZo5Flr9Wv2d/wBhfwF8HtVtPiT8Rr5/Hnj6BPl1m/iWK2sWIO5dI08F4bBDuI3gyXTrgS3D4GPrD4d/Dj4f/CLwPpnwz+FeiWPhzw7osC21hpmmwJbWltCgwEihjCqo+g5712lf1Z9Hn6HfCHh3RjVwNL22LtrXmk5f9uLamvKOttHKR+LcY+JWZZy3CrLlp9ILb59/y7JB70UUV/Vp+fBRRRQAUUUUAFFFFAH/1P7wKKKK9A5wooooAKKKKACiiigAooooAKKKKACiiigD/9k=`; - -// read from file & attach +const taco = readFileSync("./test/taco.jpg", "base64"); // document in readme // pdf / remote vs local @@ -40,14 +40,9 @@ const taco = `/9j/4QDKRXhpZgAATU0AKgAAAAgABgESAAMAAAABAAEAAAEaAAUAAAABAAAAVgEbAA */ -// llm shorthand -// llm instance -// stream -// message history -// image from file // image from url -// pdf from file // pdf from url +// pdf from buffer describe("image", function () { LLM.services.forEach(s => { @@ -57,7 +52,7 @@ describe("image", function () { if (currentService && service !== currentService) return; if (service === "google") max_tokens = 5048; // google returns no response if max_tokens is hit! 
- it.only(`${service} base64 image instance`, async function () { + it(`${service} base64 image instance`, async function () { const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); expect(tacoAttachment.data).toBe(taco); expect(tacoAttachment.contentType).toBe("image/jpeg"); @@ -78,6 +73,49 @@ describe("image", function () { expect(llm.messages.length).toBe(4); }); + it(`${service} base64 shorthand`, async function () { + const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); + const response = await LLM("in one word what is this image?", { service, max_tokens, attachments: [tacoAttachment] }) as string; + expect(response).toBeDefined(); + expect(response.length).toBeGreaterThan(0); + expect(response.toLowerCase()).toContain("taco"); + }); + + it(`${service} base64 stream image`, async function () { + const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); + expect(tacoAttachment.data).toBe(taco); + expect(tacoAttachment.contentType).toBe("image/jpeg"); + + const llm = new LLM({ service, max_tokens: max_tokens, stream: true }); + const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as AsyncGenerator; + let buffer = ""; + for await (const chunk of response) { + buffer += chunk; + } + expect(buffer).toBeDefined(); + expect(buffer.length).toBeGreaterThan(0); + expect(buffer.toLowerCase()).toContain("taco"); + }); + + it.only(`${service} image_url`, async function () { + // const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); + // expect(tacoAttachment.data).toBe(taco); + // expect(tacoAttachment.contentType).toBe("image/jpeg"); + + // const llm = new LLM({ service, max_tokens: max_tokens, stream: true }); + // const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as AsyncGenerator; + // let buffer = ""; + // for await (const chunk of response) { + // buffer += chunk; + // } + // expect(buffer).toBeDefined(); + // 
expect(buffer.length).toBeGreaterThan(0); + // expect(buffer.toLowerCase()).toContain("taco"); + }); + + + + /* it(`${service} instance`, async function () { const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service }); diff --git a/test/taco.jpg b/test/taco.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d63f4ef3f9d4e76378b265e598a1d4bf5a2b6309 GIT binary patch literal 4525 zcmbVPc|278_y3HsQwUiS5!riUEG?FhEom&FP_mUV_H|^KdP=Ay`@TjfyRk*~C69V! z&sayKCP044|+zGDC^VF!NUD=?{lZ0G=>KUHX1u!UoI(B<6FE?6B?jC+VX8PKPE?%-e#Pks`g8hIEoCFAaM_(^36O(ha z2tQXE{w?GYS|tD&l&8fqyPq+WVrsjkQ2#nZLS#_)x847XSe=}G9icnh&`@&p^6`Ue zdJMuQfqq^zd=tVvK2TB+j;G=4KX4%pJN&?n-)%0K=|DDbAk69LV($du83@Zc{7ZlR zU)bHvpY~ju(6VQD_OLLAR(@y*fiplCoP&--zyc@&WuO2J190&__$Yn%(E*;o3j_fl zfCATn3-E)ywIGcba0AW|HUu8P5gY|_5LN`oXmM$^hI}FUUBllx&s+eGe+2;k=(o-x z1%TQxC{4m|op?3?9CrYy>GQJpvHu>9Ho?gBzSj_n->+fp+5j+2QK{=kpf-vFVCNB) zx|K_%?mUL#PXN&3K^+Hopq8Ks9dyF!=;+||a7gGG>1o2q#Q0s9{t@Qy!a@_aUjn0f z!Ql+h$;Qmc{7e5|1L`Ey!A#T^z{L!#fD;@h2I#n8a4s0N0|u1?#_%J*pU3 zhzYuJ`%c*1@QBEJv2pPUiAl*Rskx8y@}CqG7Co)3!dKVS*4006Ywzgn>VDbt*TCS= z@W|-fF#>V+-Q4{9g%2MWSJ&3Rd?jyCzHLIg_y-+m{=w*9e7GP!boBIadL|kl7+oMV z;9T?!q6&=rv`m@oy}88{Z!z;|$K+JBvK&U3k$4?^`dRmjD-k7DX_US*`tJz+`9CrG zMd%kFDgm&=VbBYMa{*1TOOndH_W{8wmCa6)0=ac7B3NybWLw$Zp4sWh=dY8(@pTw< ziJo6ggm4SrVev1lib#9AL^~ne%+qHDOCuWaop>x$A!X@eri*jki{Sxd4&B=F=}2)@ zJ`%sLg^R_;A#eR9@7%R9vQ5Gux&EOT&tp&gzyHyJjdu%eA?iU z718P*6}XBJmbwPc9&L1XCfZ!r6c^?nbX+$@ItCW*A=ogs<~= z_cX5=i}WAlrKl4%2`l8+lz>I&QG9fSrHcmoa_DQn-fk)g*g9`2NChp$c8juKbjZFt zbxnNOkP$xOZo&1ynZ5%-o|0t7iurFzTwMl=W432rdj|1$A>*gqT=mn0oMxLw?{1%d zS=IfC#2@xqGu9~4PJDw3YCSOZS$nVXGrW$-ciFHmYxm5E#=GPR4^@+|NAQ^H0vz=X z?Uh95`w2u{yI{6_8aTzZ0Nf$jHCb}rPIWWUh2nXMBC@44(IdE2Gb(6NzYnIKc3){ zbrlXrb3T5knWd(;isaJaGMDag{xqcYEW-P}39grUr${m{vwv88zP6twWUAfZs2OmbY)2n^etqim&2xUksaiOP&V>M8!2U z;3`O6NcJiiyPT4dji)C?-Ogw_L2689FLM{cp8r{K-f*DR1fzzvw3SAb;?8GJI2 zkpjc9fjOGtSR^cQGQ}w>H)bZ2C2rZ6zMm(N<(*o$(z#f@7T&Aq68%ev+U(;w@uNj| 
zJO7S>?e9+ulX=J3PE#2{WUC(8W@DEI%47aQ(WgVLv+lc6Ho4DZFQ)tWQ{mKh0 z{G)P2D!3%{G}&TE-U^j2^Ast--Nh*J@Q=Z>yH4A|fh8ET6|GU3(?XW?980_ukG$$~v`w zmn1t+1>^A9`4uSMt&Pp3@MYa6^~En!HR7a~cwfIck-Qf{##kDzQo*^UMb_=pRPblr z<;BpWqiPDIQRJ|PQnH&;?~Ahm)y$Wtb}g#3*5UXd!{aYo=WgB@eYHA;S;O8INY=Bh zeBjQ9CYZT$1=tXbPaV8;ZU4mw7AH-@5xbXpKbMq#u|B+}Ph4n2-my}(e)0N2l6CcN zlCR*0>2dQEx2r1cKKkO7nkhbP`#ivjQ$cgF!O?dE2;J>v1hb`5*KFTkLRa zdr05pXZvZeCAfv&Wtp74tl!-vyL!G1SENzE5&mWTpv-XI{9|rIx#GN=nqlVf@^0bg z&Tn0UT*Sq11vTWzwyE{NK)ksr+Fw@Fd6EV~zDjOrxTQG!C2!_BM_9vq9B%_)Vde9~MrzAk+UU*C4|F?ZX6$#mR*wmiScO zb+tY>htw7Z^)O@AIPS*ei)S9nkt99nGZ}&II-*+&FWT z9X(pW+eh)RrtEtKt(>}KAO9DAw;Zfea)z$QeQ*%fDW7CaEGy;Z1Y!&P+td*+a;5e#8@?}9%{;v z|DyP52NiG|up%_Ys@yeQ&_ebG2Fzd`T{JH(x4WKSbdU?T{fy{h+PN%Xg&G_)o}5)<68&v)Y=m zvZQCbY@#zIXAQSc&1Oij;oLu*90{xT613?>%9>R_;J(SGd+@D5XK2XmGQ%vz=zyN) zgy(>=sB}ob@piPiXvf*RD!hlv#Pgje*YHWxPPpSD_>Io#+e|IbYl>qfM({nnFP_^r zbcET|nXnKTF|7RLZtG2>UFO%bRB*dVVkfgvA5H3*dNGwyvXgc(xMS@3Tt}szi%jD?kS^H1ae5;VbCMjDD6~Q$b$%TDb0V!WDdMx46My0GEVPM;57BtxXGGMVSy;SMvi&M*dFm zIkh3eiTy`HRKpt8t&X;z9R5l@sa*+$R$Ind6$&?0H3G)KEZkU#u)w=M0 z3z;7CC#93ciJ!>^JGXiy0(14EI=r0)wK59kyGwjR#a1(8h@Z{6+tCOAE+}jU757Z2Y3HAxv`XEG4%4jC>|P=45gR^-*j6E>*EtXQ%bE~APdNGG z%aeKVO!(TTcF`2_hvCfvk}BDaa%$@oDo_Q-?ia)G{_))*f1@J7CG%$g4;~(QuPPlW z=3+`hVudYbiy|CN(U(e{a_;wyx4+p%ehZTLh}Q_+*tn6!mf<}m!g1rGM4tu~IODQ< zk%)D3qU1LByV=@KlvPJFo}43j$B*SDj&cTns*I9sdz7Q6aPiT*-YY4!<%j9*OP$Ot zn{Bah(nd=b>$a#Pi3*5MqMfr{Za6u}8tu`budNAVa*l$&7QU)4mFG5BP<+FD8t=20 z*I#BhukPNk9yhs7{^%5Hk$er4?v6x9W+Arua>r=BuI&ZQr$wlPP`eZRC{hLOdvLS* zpGrzyLsE< zpwoWQ9^FMsx3ykQt+MknE0XbzGH@AA{U)-YyoH_I7Fk7+5;IWGbPAiw+&ul9Wd<(U zp^{z-5`u))uzBupReg-uz0jXHDB>cmDkR;+7qc#9CBz3d`cF#ZuDj276<{7sc=iOK z4#;T1bGp!9u9Lq(|1F0x4P8^m8=5HsIR*E&8H)RY{CA=~OL1+QWt0GSZ5`zaLFZQ$ zMxMS;CSIM(9S8_35tpAk5t%5p6G%P_`O9M(+B7-jLj|xGMkPqdTVn)w{nMn~GVj8r z%|T9c!K8@G_K8w8?!tOmLT)+QC0(zIc5Pm5OZLgk#}ToL8wYBuv;L?uS!h0KxW4YQ z)tKJQN0_U^6~1}L&g1wsr7Uz@c6zY7ab>Mur%gk2wV+#5&{c$0(^*|igbyt!(Gie3 
zQsj-Wq2SI0%*QUTa&3?2A=5sov>ARws=n0UXSF!`X=RIz!Zt&GhFX=JRbHp8cfXPB zEuQYEu(?>l!!QipzhP5jpWyh~d(sbl{K!d#(a literal 0 HcmV?d00001 From 79a7346eb18dcbbcddf0d28dc8505903c9b38544 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 14:01:38 -0500 Subject: [PATCH 03/23] added dummy pdf --- src/Attachment.ts | 10 ++- src/LLM.ts | 6 +- test/attachments.test.ts | 181 +++------------------------------------ test/dummy.pdf | Bin 0 -> 13264 bytes 4 files changed, 26 insertions(+), 171 deletions(-) create mode 100644 test/dummy.pdf diff --git a/src/Attachment.ts b/src/Attachment.ts index 8e4de3f..6a03924 100644 --- a/src/Attachment.ts +++ b/src/Attachment.ts @@ -6,4 +6,12 @@ export default class Attachment { this.data = data; this.contentType = contentType; } -} \ No newline at end of file + + static fromBase64(data: string, contentType: string) { + return new Attachment(data, contentType); + } + + static fromUrl(url: string) { + return new Attachment(url, "url"); + } +} diff --git a/src/LLM.ts b/src/LLM.ts index 97cc613..e165271 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -426,7 +426,11 @@ export default class LLM { if (messageCopy.content.attachments) { const content = []; for (const attachment of message.content.attachments) { - content.push({ type: "image", source: { type: "base64", media_type: attachment.contentType, data: attachment.data } }); + if (attachment.contentType === "url") { + content.push({ type: "image", source: { type: "url", url: attachment.data } }) + } else { + content.push({ type: "image", source: { type: "base64", media_type: attachment.contentType, data: attachment.data } }); + } } content.push({ type: "text", text: message.content.text }); diff --git a/test/attachments.test.ts b/test/attachments.test.ts index f3e78d7..81eec42 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -8,39 +8,8 @@ import currentService from "./currentService.js"; const taco = readFileSync("./test/taco.jpg", "base64"); // document in readme -// 
pdf / remote vs local -// image +// all services -/* - - { - "type": "image", - "source": { - "type": "base64", - "media_type": "image/jpeg", - "data": "/9j/4AAQSkZJRg...", - } - -{ - "type": "document", - "source": { - "type": "url", - "url": "https://assets.anthropic.com/m/1cd9d098ac3e6467/original/Claude-3-Model-Card-October-Addendum.pdf" - } - }, - - - - "type": "document", - "source": { - "type": "base64", - "media_type": "application/pdf", - "data": $PDF_BASE64 - } - -*/ - -// image from url // pdf from url // pdf from buffer @@ -53,7 +22,7 @@ describe("image", function () { if (service === "google") max_tokens = 5048; // google returns no response if max_tokens is hit! it(`${service} base64 image instance`, async function () { - const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); + const tacoAttachment = LLM.Attachment.fromBase64(taco, "image/jpeg"); expect(tacoAttachment.data).toBe(taco); expect(tacoAttachment.contentType).toBe("image/jpeg"); @@ -74,7 +43,7 @@ describe("image", function () { }); it(`${service} base64 shorthand`, async function () { - const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); + const tacoAttachment = LLM.Attachment.fromBase64(taco, "image/jpeg"); const response = await LLM("in one word what is this image?", { service, max_tokens, attachments: [tacoAttachment] }) as string; expect(response).toBeDefined(); expect(response.length).toBeGreaterThan(0); @@ -82,7 +51,7 @@ describe("image", function () { }); it(`${service} base64 stream image`, async function () { - const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); + const tacoAttachment = LLM.Attachment.fromBase64(taco, "image/jpeg"); expect(tacoAttachment.data).toBe(taco); expect(tacoAttachment.contentType).toBe("image/jpeg"); @@ -97,142 +66,16 @@ describe("image", function () { expect(buffer.toLowerCase()).toContain("taco"); }); - it.only(`${service} image_url`, async function () { - // const tacoAttachment = new LLM.Attachment(taco, "image/jpeg"); - 
// expect(tacoAttachment.data).toBe(taco); - // expect(tacoAttachment.contentType).toBe("image/jpeg"); - - // const llm = new LLM({ service, max_tokens: max_tokens, stream: true }); - // const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as AsyncGenerator; - // let buffer = ""; - // for await (const chunk of response) { - // buffer += chunk; - // } - // expect(buffer).toBeDefined(); - // expect(buffer.length).toBeGreaterThan(0); - // expect(buffer.toLowerCase()).toContain("taco"); - }); - - - - - /* - it(`${service} instance`, async function () { - const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service }); - const response = await llm.send(); - expect(response).toBeDefined(); - expect(llm.messages.length).toBe(2); - expect(llm.messages[0].role).toBe("user"); - expect(llm.messages[0].content).toBe("in one word the color of the sky is usually"); - expect(llm.messages[1].role).toBe("assistant"); - expect(llm.messages[1].content.toLowerCase()).toContain("blue"); - }); - - it(`${service} instance chat`, async function () { - const llm = new LLM({ max_tokens, service }); - const response = await llm.chat("in one word the color of the sky is usually"); - expect(response).toBeDefined(); - expect(llm.messages.length).toBe(2); - expect(llm.messages[0].role).toBe("user"); - expect(llm.messages[0].content).toBe("in one word the color of the sky is usually"); - expect(llm.messages[1].role).toBe("assistant"); - expect(llm.messages[1].content.toLowerCase()).toContain("blue"); - }); - - it(`${service} settings override`, async function () { - const llm = new LLM({ service }); - const response = await llm.chat("the color of the sky is usually", { max_tokens: max_tokens }); - expect(response).toBeDefined(); - expect(llm.messages.length).toBe(2); - expect(llm.messages[0].role).toBe("user"); - expect(llm.messages[0].content).toBe("the color of the sky is usually"); - 
expect(llm.messages[1].role).toBe("assistant"); - expect(llm.messages[1].content.toLowerCase()).toContain("blue"); - expect(llm.messages[1].content.length).toBeGreaterThan(3); - }); - - - it(`${service} extended`, async function () { - const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service, extended: true }); - const response = await llm.send() as Response; - expect(response).toBeDefined(); - expect(response).toBeInstanceOf(Object); - expect(response.service).toBe(service); - expect(response.content).toBeDefined(); - expect(response.content.length).toBeGreaterThan(0); - expect(response.content.toLowerCase()).toContain("blue"); - expect(response.options).toBeDefined(); - expect(response.options.max_tokens).toBe(max_tokens); - expect(response.messages.length).toBe(2); - expect(response.messages[0].role).toBe("user"); - expect(response.messages[0].content).toBe("in one word the color of the sky is usually"); - expect(response.messages[1].role).toBe("assistant"); - expect(response.messages[1].content.toLowerCase()).toContain("blue"); - expect(response.usage.input_tokens).toBeGreaterThan(0); - expect(response.usage.output_tokens).toBeGreaterThan(0); - expect(response.usage.total_tokens).toBe(response.usage.input_tokens + response.usage.output_tokens); - expect(response.usage.local).toBe(llm.isLocal); - if (llm.isLocal) { - expect(response.usage.input_cost).toBe(0); - expect(response.usage.output_cost).toBe(0); - expect(response.usage.total_cost).toBe(0); - } else { - expect(response.usage.input_cost).toBeGreaterThan(0); - expect(response.usage.output_cost).toBeGreaterThan(0); - expect(response.usage.total_cost).toBeGreaterThan(0); - } - }); - - it(`${service} abort`, async function () { - const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service }); - return new Promise((resolve, reject) => { - llm.send().then(() => { - resolve(false); - }).catch((e: any) => { - 
expect(e.name).toBe("AbortError"); - resolve(true); - }); - - setTimeout(() => { llm.abort() }, 50); - }); - }); + it(`${service} image_url`, async function () { + const tacoAttachment = LLM.Attachment.fromUrl("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); + expect(tacoAttachment.data).toBe("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); + expect(tacoAttachment.contentType).toBe("url"); - it(`${service} temperature`, async function () { - const response = await LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service, temperature: 1, extended: true }) as unknown as Response; - expect(response).toBeDefined(); - expect(response.content).toBeDefined(); - expect(response.content.length).toBeGreaterThan(0); - expect(response.content.toLowerCase()).toContain("blue"); - expect(response.options).toBeDefined(); - expect(response.options.temperature).toBe(1); - }); - - it(`${service} temperature override`, async function () { - const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service, temperature: 0, extended: true }); - const response = await llm.send({ temperature: 1 }) as unknown as Response; + const llm = new LLM({ service, max_tokens: max_tokens }); + const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as string; expect(response).toBeDefined(); - expect(response.content).toBeDefined(); - expect(response.content.length).toBeGreaterThan(0); - expect(response.content.toLowerCase()).toContain("blue"); - expect(response.options).toBeDefined(); - expect(response.options.temperature).toBe(1); + expect(response.length).toBeGreaterThan(0); + expect(response.toLowerCase()).toContain("taco"); }); }); - - it(`anthropic max_thinking_tokens`, async function () { - if (currentService && (currentService as string) !== "anthropic") return; - const service = "anthropic"; - const options 
= { max_tokens: 5048, max_thinking_tokens: 1025, service, think: true, model: "claude-opus-4-20250514" } as any; - const response = await LLM("in one word the color of the sky is usually", options) as unknown as Response; - expect(response).toBeDefined(); - expect(response.options.think).toBe(true); - expect(response.options.max_thinking_tokens).toBe(1025); - expect(response.thinking).toBeDefined(); - expect(response.thinking!.length).toBeGreaterThan(0); - expect(response.thinking!.toLowerCase()).toContain("blue"); - expect(response.content.length).toBeGreaterThan(0); - expect(response.content.toLowerCase()).toContain("blue"); - }); - */ - }); }); \ No newline at end of file diff --git a/test/dummy.pdf b/test/dummy.pdf new file mode 100644 index 0000000000000000000000000000000000000000..774c2ea70c55104973794121eae56bcad918da97 GIT binary patch literal 13264 zcmaibWmsIxvUW%|5FkJZ7A&~y%m9Oj;I6>~WPrgfxD$eVfZ*=#?hsspJHa(bATYRn zGueBev(G*EKHr+BrK+pDs^6;aH9u<6Dv3$30@ygwX}fZ|TDt1G($Rqw927PN=I8~c_R69-cY5S*jJE@5Wr0JUS6u!J~3#h`{ZMo=LkbbALoD8vfgB}Fh|2>mhOnfS$3 zNV5}8Ox=$fj;C0=UKy*{myZZPRVS|0mqr-HxZAy;()@wxQ}MN`QWAZTXb3Z&Om9W2 zbnA^OWoQbAW|3W^fw#J;YzDato8*`rHQs+@W70D&SyT{wb`SN*3nI z5G%$wJlq932=n{60Eii*9H8dFih2ks?QY=>nAFL=5g^P@#b{YUEHt0S$D7WbX zx%TzvzIK%zpvzLEd9LNr0ch#LFf_(9 zEGt0C9v~%b54vynAc{~;v&2?S(-sTTft@9CABMNFZHtY1W0-99CEbUNfp_yu{LDBz z@8z^$LPN$wX4Hi+dZQs6K3QiKKF0}Nme@EII;;F}IplC(YvT*C3-Oh#(A}e5pIz01 zyR}D2|ftBF0T=1moHZy}$wS*PSCmSzHQ%x z2tCQQCx4jt7w1cuhY69~eH`31KC4)ZZJ^)f=IabocAkBPa zEeg25yPX&9-i_N(Qiq!I3RDrfx&0t^i)&MSQ1D(w%|%#LTNr>1cPiltAYO;6kBn(B?r11c^Bz~#)z5~~V+*`U)lDFtKbZ|;? 
z&4wTUtK=KE&uQIWUQv1mDE;LIhXXgx44PMa@%Z<7a& zx45^oYSnei^~%}`?!O-+cgfSmn_c?`=Gmm*Z^I(96ve&$zDs|)r84)IEEiE1kfQ$q zm3km*m1)PjdU9nkk9BTlidI1~M|O~WfP7AUu2T}d>5is9l$<%;7r2&Re06w>W$KM~ zqITBTd=Ln>^crw`_N?{ z;2d_=E0n!*NisQ|XYuX9q3+UcqdA(MC45|>2tz^c6HdZOmXTB?X2Elx@_0f)1z&-gS;UxN`>Ll-kWb0X0 zTrQis=w9sJ(q7k|@|k3SA~DJ@uMXP@4(Mgn+LJC+3F~3NHW71pIzY(aHg~{O+squi zWO_|F>78)L5*gcRXXRD9IzQ(ddSxh}E7(8sC~EYrOz$9BkSMBCkGGO9FuZ{#*mW+h zvwE7d)6Ag=a*R5URs>}qdqb_E6g)kN2Wel;pWe9=hZ)XvRZR!RQg&gxAPGj8J0!gR zrdV<2@MZQ?_Ocbd5@0zI?t>$z3eD80_h^{DI)H5lk`T4lbn8kteH3%fOBH^g26#lLN2&P^s zr&d05GDs)u_8OKzCgNxllk5pLC<2wKmghL{zW%}5^}%S$?d=3OzjaSzT3>uWYikZN z2ZcR7*L|%UMs|u)wMi7#vkN?cxlBcyAM80Tyzzv&zHMF1TH9?Mx5&E57P^)^zE5N| z^foq}!--if$Uj=U6Tc>EM!Pv)e^_SZSdvtQ=@>)(ONejQ!XW8u6>ESl<*s^6cH;Q1 z#n}nL{#|{l}}@td^zNSA;R{`3A&Jjr8L9(3^2FSyZ1W9$%;!XP#N2 z-SAzyRfxtgq^py7_3*GJFO%x_v<`xJ46`~S*IukgQDKfLxzFnS&GYL!1LA{I z!c#{A90{k(b*tUfbgjOH>}{#V;%^O+LUU<*#QkLtWzjho*Kb?Cr&wC38%wxpn}^Wy zG6EpV9x3xioCWA6H6=aE3)%jmZePu#Ji7wy0CmkDZNG`a{J1i-2`Bt&UrFb&<~V$^ zy9i`R1<35M&{mtCz144%v#7LKBTPPApjoV}#W-gDc5cn;A@Mbt#zXUK@J9^vj*ME( zo8(%K{c-KDr8n1-I&Mjn)*i|pF|7l*`fXvo8-z&j{$NOfUPM-xILbX1D29IHp|__B zL*JQ8*7-VrZVY*&$!PiE%zv@osg`qx0M8+w9iy7Az7;HYezs;5NRvrdNM~t@o}5Gc zjagk3Y_>6!Ct;ITqhu3FojJO^(^SG-($M4|frkp?4y-QoSmFcw9Z%(z?eC0kGi9@? 
zm(vAgXU|%!6_)CrnqYL-Hj@B5hA?#8C3G^cjd?0dMSZ!wbe%O4bWvlIG=nwOEInVj zhjzd`Bry8sXBTfIUr+juZH5JyE#7~UQiwR!gmG@wm}aNyo`13xEo)tzP64MWWG|j8 z8u8a2_=C2FdRZ9(eG&Au`@$mY9vvWldP-@wj5@38H0W2V8wnaQO?!)qoS_J=(ieoI zOvH}mkBRh_p1oTW66+?3u-GH2Ex~c=BQiwpJ zJlF7O2PBaCojRRL_mp44*Iq}vcRFpBD>V9M7do5{w&b;4^<_V~Vr{+O_&hz9k5Sm` zq3|%Z(6B5~wz2k0iH-QlafAa>1%ZebdxkR;6SdA?@dK|4Jf8PIO%64Fpw$6RYG2R# zX>Iq(xf`5Xk)79-@;BAQjlWu|w@Ss3sJv3Ew&%lBu-H?vYsC8XPJD!lkv*A~z_-k= zLOaM?B5}$Sf-KF5BWHoB51WFA{GlweQna618{*tqVn)YKUVq?khU_=QER9uW?N17xgAponbjg0W`=>f;sulH3?st)Y_@k$We2-__a>^{E78lUiI13qq!3# zwxMEl75MK1q`~J>ST#?`mUx#vr%-jwpZ+DV;W!0KNkZmO#sK)zt)H@`EQl6RRWhwb z0&E7|fG~@z)wlK1-RsxN#8Gr)D5=xpv=b}=CWPbwz@(9bIhD0Crd-Q>qEo>~Gh{X7 z77AK5>TfF0wK!?7Nx!<5uDy?D{Qg$SEc_R3J9EuH!Z@qmEJ*QRRHd3BPirM6783nv zAnab$>rhdDJ6pO@%Ox(}BYw{Ba<3|=A%Fg5_Hfxj{%CfzZCFO{?%h&=?%CNBvi&p; z(otqN>+5giLLa^*G?xzN30=IgQrV+r7dW4bX;zKtuD)O$UnwAKC?CpkPt{77nUArH ze-jKcCfRrOlp(Q^b&W}mrgt4n%wikNxeSBBE_n>K-IOIzi6!<)xGRYA)wGgqp^s@d46N#krDHPc#9SOgXhI7Vbj?B z%c6@8dCOGPYBoNE#3N7HD^ihbC9*xGm6chu;?fcuv)s01keHHZ1vXl5D;29O7wZBr zyPzyLZHKMtUI%PK+*X2zTFtaDzU1qn(H=hRRj-SoJw7I5i%4b0u=&InEAKgoae-lp zXk0SkjlJ52HruS*1QykTZ&aCN`PbcKuw$1st{peJ@&aF^aR@~{XA@L&YvK%+VU}G4 ze5iuesu&i6=*#nvHbm_v-ZLr5^Ij#|YSAper4XpsH;0x(2h1-tIobIy;0~2a( z!G($SB!iu#P;;hGeI~C`O=-3|d~zoB0!`*JrU-)Ko_X5#kSpy5o^z49RG;{j#l~45 zF?X9Ih4IdviT(8@+q|`BveLTprbESZ6^2I&ew|V3pDXRe9gSyXT)zzqKQ;gCD;p+( zM)2(;YJ%P5)X(N3ZSn>dn6UIcEcvQOXZBn}uD!7V0yXr$f+d@eTSYoquPit2S8cPW zA8t3dX)Cv{0cKF`@e|PP(xS0|z2_R0(P6)#+kC$0^5- z$7Hs|bOQanE z1oJ;uh(dYiDt}mVmtC3&HaGT6-dY429v#ySHJ7V)C8ow=PSmnEI)=b3_RJsU(S*+J zV$p3>RkK?DFvTc;(-T=h!1u~CP!pE=0eSSu#c@N7S0Z57CPg}!5z{QL#`2v?DJDt^ zCGN{0p-&&=)Sb28Xlo;ZXc^CGdwL9prf30uu$y5aPeWD6WIk4%%~DEhTiwOvy!rS% z&3z#DWo2qBA*=M2xIu=_R0sbrmP;Y?_rRa^k}3WYU6n9H^(})Zi-woMKKXfgbab@J zWx3DUr0MLpdDYk_LO8As}d*Z=x^K+uIv#T&SnY6&C$9 zBn1u`G#TBt+n5b%a;Cr0h^sm5Fl^OdxJ^8IebW);DWATq#Ba=#rggj*wNKy5NMzz& zBm`bk9bcSVPJbC`dHrI>o^=LSvTFpT`VAK`x_naOpvS~*l2$1vIk$avBA!|aeZ+7c 
z$_9Zzh>fc4$uX&w@-$VORCscG(B)OA@SPj>BNY3gxkkcPgNi9bE=?&3A4`3ekrdsb zn~`M;p8I>4?@@ZI{9Afv(tC@pp@Oe5BYUw-%&J_WaTBGls)&d8q?t$i<<@=_CNfH! z4H!ww7#gkp_^`bxZaJI9@C+A9x7@E1ZRoG5PL?w3GDi>`8Qq%I+0ygfT78%{Zt#mP zqX0CzaHKn@hAOQsv=^8UbfpuyFnT8Ht++Vmmx$~09!e{5t8fMkEjr~tfIxMlIpr4zGwvEIWKC2`Q#C)c7QF9wet?hE zLKoU?t@nqm=iBc` z8_((*(i(g}7z)3{%SJ!uya{?Ir-2^Fiap*VC4pF@N zpL5F*DG+(taLhdu4DbyAP(0&60n@%?G~hHugBI^-X6@_YOu}8UqwbQ8V`2vwDRLMz z)aRFo+r1f?5idT9xRF`cjgx$a-IpH3AH|bs$emw}d23*3aU0hYNh4(D0o-Z+wIX{d zeann?lzjgsAt62`er@<$`G755?i7tl%CHNgXp}#j>j&S1n5wZ;ofNbI>B2*4L1}@3 zq(LzPqn()w{KBsX!5*a&=dv<}t=R%II;TcQatbnKM7S4Q1PQIoT=^$#=>Y(m{mBYtl5W z6}|l4kxikOcJ`C3o{TSxIi?8|N6sH7Lkhq5qttl@uBTA|-cBluU$hU0&xYKvNidrL z4q>|j76}G1Db23Fa|XlFm%W&jW0h#7B$_FD-ZhqJ5#7i!0ZmCrereX z|Jlf`<1zR2akFe|boWv-r=}kM03o|%$mZA7Of2T99u~e56~6sh$P=yk9f!H6msn)n zvFOLF?W?iqi6fK9C)a42Sgt0kz4#M6 z-UY6451Er~=V;ITs1O-q*>}{;bs74MMZ(Z&=Z{5#q+i@cw^vI#0|Dh~-Dh-tn2I(S zTXXp-bLEG{p0#BbIqIcTM|DWZmr`&br8u)jQ`CR*^+g_fIX%=K+)x}F%Oak-Uh$6nIHUavnNV5M7YffU80QPRD%y>T{bIzn<6Rsy zb6cW6`?0EwSn;uJddPn@`?^Cry2s(6ccP1ykKr!kmDg2~zbTJq@+e(z5N>ZNr|8$j zPi-~ofp7E|Xx1#H+f@UR@AS}iLP!}}dRwf{u!avAq-_hNw#uaoOD{2jo*eRn8$~bDK`h1&ssOC6ekGV38+hU!KR z+kpnSzT;y#o|V2h|F?SY4-z1MFxz0;)@Lk`H>Cj zSl@fR%*@F79;HJcsX%L8_d!%TwmQyi$|n&C{oBMJ9~Xm!@@#lZdz(WB9SgJ#NIC%@ zy+~ZnI|4E`7f@W0Y9I@N7UTs1fTPD-ZiU%Lr2MnP+2h8AGh?(WGVf>h@W-_M>jRkD z(KNxvo(UJ7)o+*t%fCcM10;2XM$1NAFKwhp(c917^io_ynn-yv58IFIF*UJUw*2Ma zm?a-a1yp9B?WxpLzap-c^$HKkX_IfT_W8Lqaltl*A%vZSZWAe`Kv}vjz}>Tc;Hw9T zA+Nc49X&{WDmxY~ReV0YceXdL!$9mTL$Q@_vXIW6I{G=`$KR7jFcE&IsHwnKX;KldV#YL z(xwKAB5cFiz+r6m*5iJvo&E)XQqVWjmA}BfyVS&dm9&Y%$Sp^sW!JE3iI0v(kQHdo zmhWk|gC!e@CFKPv4BE*U;mYo0y}J0J-Fhu!c%v+paQf9+3Ed2EkfPt(D7|Ok#t)^PGr3Y)RGfvO=k;@Xry=Cf3fLCQ# zi`%oCt+vyB-t{iEgI&+2dczmnMXj>EOmSpMuuL8Ob`1$D;fc$wM6j2HH4Q$ zqaoj&M$2sLhpptdJMbs!krJId=iOd}HdP4Lt@yf42OZ{pOoQ4_gShz_sMoWYX}yQd zDQ8(tc7UvTt%`0#?9K!C^J>GpucEnBhnsWg102Z=uzOlwez^q^j7nV$krID#wC}A$ zcRfc2)T5Y~({6@1`{yL-Lzs;miT@C9|1SIFBMK7cz*E;v2H|EStZphjfb5mGMpw{q 
z!pl;Vw772tuvDH4o$;j4u8)@=m+&BIf4Ix(u75P?Q{4Y8^uvpq)mCW(enuQc)hx$B zOY{`_*%~bm%k*x6y;)D8_-yYbMsC8y#1H}89X;M=a#*HT>d*NFf}x$pQ&X?nFtvzA zKH|l8y;frsm|&}<%&*}Yu}Yn0M=Jy8qe%<1qXRR%Nut}Aqr+1pQS*D7Cp`+8Y`RO02p14DyVOmSYlEzZ;9&JzYhtybMZ%e4s zlks=V(+aJ!LK-()3ox`%9c)lx#3#y4{ulL6KpG|&>9`n?Uh#m3G-mZy-3h98Scyja zH^3Pb7?P z+2hAkyvg}g$#)n$Gs2fL19JNOZ|~>Nx(|}lmwesC!>?Y~72mpf4XZ8t^TIwbCk;i0 z+a2ymSZ^=OrtrSH!(y#Vn!8KWk#O7<1-!if+`dDDy18U7wS3k$lIeM}Z0fhYqI)+x zo*o4*S$S|hGf6vL>PaQ(OQ_%eskx-G-FV|dXHbTH<#w@RbeIx9I$d$xqHh`{*&d3y zevlYNk)}w@cuu4A$^DYJsOvO7VBaom@Rx@gb$V5IKJ{Xue16H-1H0j=U0brW-aVRG znWCQRkESBmD^4?a7mB@!jf2>(Hs=Bd-;XX1oEilevb9axB^NhIPLO>jl03S+Rw|fx z&oIsIk(~W!4$zzKF|uSR<@S#;{r;fKup)iDaxz_9JouroY>XHcrN(Mm@UHV?-8bCh zXGfY~7U`rCasv(h-R*ava)^ zF1`BMT*n3xQBTdM?`n&h2Ecf*XXuLo7Zyl_El(v~oh>}mK01$%0a@#uzyiX_g>Bav2XWwH%YekAxU%pBT!p*?%cS#zA zv;^eDC#KZP@7o=^GDc_V8<3w>`*L(+=A#(fcH)dGjqM}Vk_el+c>B`{9xm<>IZ-Zm zLL!-Yf*3nju_(8ZGUd9*K`iofWW+BYFnZF&+a|=yxqV?oUOcG#ulnSR$DMs|e5Tph%WW zVjzE3nMh7+rG!}av)+~;o$#+EHyPX zzOUO?^#)Jh*t^b7pTW+I%f;xy&JMPCO&5RR``BmHX-Mw{qoJp9BjKea$;A9%>-iEZ zvuUBm%0j5UWax~`ue!K6dDdip+zs3f{+qQKqH;9C(1Z@95()-Ew=`BdLh2VS3zI8qYGH&&7m9+vpUc+x8l!i-ATXKhw34XL2;ya_VIQz!OL^)8mtqnb?q=~&^h-$;Zn^HRZ2p(gH z39An;`AWT=i&VP0u&CUe7OYW51Icv=q%Vc7%Zm z_uAp9n}osEUdk2*pV)*i`WRSa-FWtCwGqS-75@K#V0)r;+0(0XVp9vnb7lWiMj!q= z>Zf(ioa@gSwA55Jil$lh)%4U<)$j@HTQU2KwuUUsZA*2O^QTKobak8g0Qb~ROMTW7 zfTF2yF*na6i(lQ*Nq^rPen^0>$$b`K!Kp{FVa-VF`kCiXZg0Vtr}i*rcpny_YOR!} z+?Jiv?dWlT`}o$s9Fxt%%684d7ek-q-Q~jS*I5+8HtvSw+Rp!D=+gVr!gqcYy9K74 z&eClx6f6{1Din;ynjz?XZlJ~W7^A@0wiHIt8$aou;f>MYpU%gUlDwAK*nX0#vHtyl z_C=B+ZkOffY|oR^2>(+IlZCTMFirZMhn>bqzR=38hvJpcM4-@gUYY7_k^G*FW9;5r zc9q4c>C?hd{uS3{MThN*(w!3e05e?bI#SNlo$U&%>((Dz0_JeqbG|}!wI$& z%q2JQ)Vas;i0RYqNXW!CC~QK%u$K$beGI zT2KuzMjus26(zmofK;m2gY%d*o~sHBKA#`RBNc9c*-GLmbgh?*9V;^TBSot2E%~Q5 zl+R!WA_h_JT;+irbJ#Z-tSy-;B^t&&dOSwPV(T!CB)no8Y4sP%k(MD^0P!NL1vK&7 z`3luW2$gkI#Zf>IZT2=m4R&e@d zeo#B=Q|9`w8}%|)f%GBjYO01&Dk5qjm$+#1yia#CE=Sh~88Vdp%|VU}0a6mF@JkhUY&~W3f#rHK-1Qdo 
z>0*z5?#-hQUY}k^X7~1bkI?($-~3#c3mF4Cl@2%|0@1=ARZ z^qlNaN63&>;O_~mmto}?tAhznb}p;GpyIq1Z^yf<_6Ui~cpbbP;uV7W!+ke>wYG-f zPPz2~%UgSs(>vsKFle%uo=WIDYz;BR!doAy)aQ0QCpE_Wz1XK+3Kpr=V_H8w zqzaizn9ALx#?fo-N)_CtENYH*1|ID|x=xa9d#;9~1Wgrcx^8=evrfky*Xj`269~A;kh^O|ewZnM}=SmM7NX=?h#jjLh&1kIT+A z)If4luYo@s+e_L&eRJ$gw1`)>u#efOq=M0iYIPS$GII0z`T56eNxK@~Y%*^~Q&w$1b)jM9Z~kuRc~YX`6r#ySCskW5cq|#a39s;ZiaL~OdEpgu z1k*sKkLZ&?6fAi=)77yKI1xii%)@DG8r}663xkJcwLTj?s`h{GP@_2}`A|;w7zrzk4QOQ*O$(e|M^<`vLD*1^i>Nr*= z+A`y@f{!zLi)ys9OrFM5`Qw0292Ciyq>zC>8(TkG1O;#UUh?#I08kuwpS_vhufJ0v&p^Yr`=^WG7!qVG(8n9u7=J64fr zQq7B|9rzl7s)I_|8UeVp?=cqGILQ}0O(n+^vJz=vFBU9JmG$=DWzi+qCHw@D0a7`M zA`%pmU8+8W{u0{2*^tg&3;I&i`4`{YJe_n8 z{viTJZL?$}#l9w${3mydrW>Z%nY!WXf$HJv5$Zw4F%7^mXWsZ-s&olv31;C*KlH)j z?j?Eika^cI`l>)WJ*ga?%>0HwJm{%<)OP8pdvwMG@fm;Ca`jfy7ixY-sic42*f&ld zJg3(O0~;=Zsp@cdUj@&Zj~#~LX=F5Ws@!Ik0-~(wlbJO6&)S~s6WrAW9lrQ%6+S03 z&P&xJ{;BC%2s%J#uxZy3=Fc}fkwE9(T}QAK9b{FT!L3^PQ~;#X$T|9v&JFq)ru$h|ls zvPxYyWT}V&Dol3#)t6pVE4nIClEq=r++eGcG-tkOW4{n$Ra~3z?`@_gXRUiR`SrhY4K z#>C+t>pNtm>!Zw*;p^qI0|g<)Ob`r0jaN6asw2ZGLT}bMbHnQ$OH8cR7{Rq?=4%&x z2Qe&O`w$~b%fuo>fkgT`PVx=uto@&SdDpIXL)<da|A*x(b?o zdUj^iN+B9%;2{1URo7=%m@r*RJi3fQNO_`AZY;b#tClm;A}NQF#!Y;pMMdh=^fO@9 z>J>Xv^joKJM>M7x=xh!oSLO3JlxVwTn$DPHdGsnkAvB)9d)IE6ZHgd1vd+Z;W1d682CBy4zti z&6;T6!rzSKIy&zKKfAx9J%7q-=Mac{u-_GIYEaZt*`h25Ne?ch`E_c2{pGA<;nVkx z102u6#||N$g5MhA{!rFwaI(;8$S{1DePGc^L~j6?Q$2QMIO09 zPdma#_kX(|;oOau(pX877ac9V4O8x3g{Mdbr6oS)7 zN0v#H_j!bhUNl;q>GrkeA~){;lCg@&Mg5(z%E1HV`d7{>_}@9JZ(VJn>=HKC4q{My zLpw8D2OD@&E}T?=SV7rE-XI?4H+E(aOI8sZOC$NW=!leE6MG6ycn2;fB4XpB!^#Z= zQ?P=-+!R0#4h{+c2LPbUF6{uZG&6i-ZDI+f;6P`8V{ZtxcA((p;6i6ds6r4x005m` z6k;m{H8U}FK+J;+syaZe)G2u2J;eI(G+`)^0+C~@0#BIzJLi_?-}e8NR15?I|34|k zx>2LneiYApj|7nW4k1sp9h-vz^G);Jq7ONB*clw!(IJ2QT3sYWS)>yb_Ual2Um3r5 zw706UJD48HLY73$&Gm=sl|EYND&Uk>VT!eN_p49f6HS<{TU>u{4&#WYh1dwy^E8il ziH`_=$2m8k)y$Q2yDZQluP+AZbND!Yi7Co@fwHnw2pV1bo*=wGx2n7Urt$y1@imz1&#&nK47Nw zT-dLY@^1NHY?5B#-Qf9?`lA_={@NnLpmwJGQG7&oU}0>) 
ziZ`GdjY(jIKi2Q?e+d=de}nq3pkP;ZG;lyf$Xh!{=x?qF#2$)p%>NM^W_I=tqNWf# zgv;e1fAtY=)-W@2FtyhKb8%3Bfj|mw00#vR4=)857d&XdU z(4fLD4>dA_AWjHkeJ)-u3LZ|NF1w_ijiW6*A6^xXD#Y5}7O{k(E4!#F{9rhl8A4Sg zMcAb&9N>rx39*a9v4(4~r$8jq|MLt0{*hTPYU2nu0sub&aQG~$!9>qU@%LGVw1{ZAdD5crj3WAdl2KV62-uIT7sX=aUZ*>8aV1F3(c z_P=p-FtxG!8!9*^U<3>RcoByeFaipAK|lhB5)AqaI)n^@hmeEwxOw0OKK@%C0pZ{C z5o^F{FbEE(DEt!$_$B<8DlYiaV7ME855ql#Py+_S#o(c8`L;d6lqRR~$cn(zq-4};(pf)4`xt=`PWS`7YO27?$MdgtpDP{`vCa4 z{2x3Z5bm@8-~oUj5Zv+q!Gl}N`CoDX0N4M*gTIpgb1nb?;)Y)s|FIqb0Ot6gw!m#h zTnhg~j+YZ2)c?r?0yzIm4hZ1=FTFrc;D6}=a`OJeW(PY6{AFi{I1;L6ZcsR+>?$@k z@FNVDLEL!K*2XpzfZwk|I3Y%%Lm?mm76XGtKw?0k2(JV$kO#;s#>p!o!6gRf5#f;l j@(7{-|3%=32kuUL2Z)`+Z(jm{U>-0!Ev>ks1p5C2Hj`#V literal 0 HcmV?d00001 From e75dfe3db3f0dfc9622ae2e7d981e862cf1a6fef Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 14:23:50 -0500 Subject: [PATCH 04/23] got attachments working with claude --- data/model_prices_and_context_window.json | 632 +++++++++++++++--- public/docs/assets/hierarchy.js | 2 +- public/docs/assets/navigation.js | 2 +- public/docs/assets/search.js | 2 +- public/docs/classes/APIv1.html | 4 +- public/docs/classes/Anthropic.html | 4 +- public/docs/classes/Attachment.html | 19 + public/docs/classes/DeepSeek.html | 4 +- public/docs/classes/Google.html | 4 +- public/docs/classes/Groq.html | 4 +- public/docs/classes/LLM.html | 4 +- public/docs/classes/Ollama.html | 4 +- public/docs/classes/OpenAI.html | 4 +- public/docs/classes/xAI.html | 4 +- public/docs/hierarchy.html | 2 +- public/docs/interfaces/AnthropicOptions.html | 34 +- public/docs/interfaces/GoogleOptions.html | 34 +- public/docs/interfaces/InputOutputTokens.html | 4 +- public/docs/interfaces/LLMInterface.html | 5 +- public/docs/interfaces/Message.html | 4 +- public/docs/interfaces/OllamaOptions.html | 32 +- public/docs/interfaces/OpenAIOptions.html | 34 +- public/docs/interfaces/Options.html | 34 +- public/docs/interfaces/Parsers.html | 2 +- .../interfaces/PartialStreamResponse.html | 4 
+- public/docs/interfaces/Response.html | 4 +- public/docs/interfaces/StreamResponse.html | 4 +- public/docs/interfaces/StreamingToolCall.html | 4 +- public/docs/interfaces/Tool.html | 4 +- public/docs/interfaces/ToolCall.html | 4 +- public/docs/interfaces/Usage.html | 4 +- public/docs/interfaces/WrappedTool.html | 4 +- public/docs/interfaces/WrappedToolCall.html | 4 +- public/docs/modules.html | 2 +- public/docs/types/AttachmentType.html | 1 + public/docs/types/Input.html | 2 +- public/docs/types/LLMServices.html | 2 +- public/docs/types/MessageContent.html | 2 +- public/docs/types/MessageRole.html | 2 +- public/docs/types/Model.html | 2 +- public/docs/types/ParserResponse.html | 2 +- public/docs/types/QualityFilter.html | 4 +- public/docs/types/ServiceName.html | 2 +- public/docs/variables/default.html | 2 +- src/Attachment.ts | 66 +- src/LLM.ts | 11 +- src/index.ts | 3 +- test/attachments.test.ts | 37 +- 48 files changed, 812 insertions(+), 242 deletions(-) create mode 100644 public/docs/classes/Attachment.html create mode 100644 public/docs/types/AttachmentType.html diff --git a/data/model_prices_and_context_window.json b/data/model_prices_and_context_window.json index f7a56e1..9d72f85 100644 --- a/data/model_prices_and_context_window.json +++ b/data/model_prices_and_context_window.json @@ -451,9 +451,9 @@ "max_input_tokens": 128000, "max_output_tokens": 16384, "input_cost_per_token": 2.5e-06, - "input_cost_per_audio_token": 4.0e-5, + "input_cost_per_audio_token": 4e-05, "output_cost_per_token": 1e-05, - "output_cost_per_audio_token": 8.0e-5, + "output_cost_per_audio_token": 8e-05, "litellm_provider": "openai", "mode": "chat", "supports_function_calling": true, @@ -594,7 +594,7 @@ "max_output_tokens": 100000, "input_cost_per_token": 1.5e-06, "output_cost_per_token": 6e-06, - "cache_read_input_token_cost": 0.375e-06, + "cache_read_input_token_cost": 3.75e-07, "litellm_provider": "openai", "mode": "responses", "supports_pdf_input": true, @@ -744,10 +744,10 @@ 
"max_tokens": 100000, "max_input_tokens": 200000, "max_output_tokens": 100000, - "input_cost_per_token": 20e-06, - "input_cost_per_token_batches": 10e-06, - "output_cost_per_token_batches": 40e-06, - "output_cost_per_token": 80e-06, + "input_cost_per_token": 2e-05, + "input_cost_per_token_batches": 1e-05, + "output_cost_per_token_batches": 4e-05, + "output_cost_per_token": 8e-05, "litellm_provider": "openai", "mode": "responses", "supports_function_calling": true, @@ -774,10 +774,10 @@ "max_tokens": 100000, "max_input_tokens": 200000, "max_output_tokens": 100000, - "input_cost_per_token": 20e-06, - "input_cost_per_token_batches": 10e-06, - "output_cost_per_token_batches": 40e-06, - "output_cost_per_token": 80e-06, + "input_cost_per_token": 2e-05, + "input_cost_per_token_batches": 1e-05, + "output_cost_per_token_batches": 4e-05, + "output_cost_per_token": 8e-05, "litellm_provider": "openai", "mode": "responses", "supports_function_calling": true, @@ -806,7 +806,7 @@ "max_output_tokens": 100000, "input_cost_per_token": 2e-06, "output_cost_per_token": 8e-06, - "cache_read_input_token_cost": 0.5e-06, + "cache_read_input_token_cost": 5e-07, "litellm_provider": "openai", "mode": "chat", "supports_function_calling": true, @@ -837,7 +837,7 @@ "max_output_tokens": 100000, "input_cost_per_token": 2e-06, "output_cost_per_token": 8e-06, - "cache_read_input_token_cost": 0.5e-06, + "cache_read_input_token_cost": 5e-07, "litellm_provider": "openai", "mode": "chat", "supports_function_calling": true, @@ -2160,9 +2160,9 @@ "max_tokens": 100000, "max_input_tokens": 200000, "max_output_tokens": 100000, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 4e-05, - "cache_read_input_token_cost": 2.5e-06, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 8e-06, + "cache_read_input_token_cost": 5e-07, "litellm_provider": "azure", "mode": "chat", "supported_endpoints": [ @@ -2679,13 +2679,13 @@ "supports_prompt_caching": true, "supports_tool_choice": true }, - 
"azure/codex-mini-latest": { + "azure/codex-mini": { "max_tokens": 100000, "max_input_tokens": 200000, "max_output_tokens": 100000, "input_cost_per_token": 1.5e-06, "output_cost_per_token": 6e-06, - "cache_read_input_token_cost": 0.375e-06, + "cache_read_input_token_cost": 3.75e-07, "litellm_provider": "azure", "mode": "responses", "supports_pdf_input": true, @@ -4295,8 +4295,8 @@ "max_tokens": 40000, "max_input_tokens": 40000, "max_output_tokens": 40000, - "input_cost_per_token": 0.5e-6, - "output_cost_per_token": 1.5e-6, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 1.5e-06, "litellm_provider": "mistral", "mode": "chat", "source": "https://mistral.ai/pricing#api-pricing", @@ -4309,7 +4309,7 @@ "max_tokens": 40000, "max_input_tokens": 40000, "max_output_tokens": 40000, - "input_cost_per_token": 0.5e-06, + "input_cost_per_token": 5e-07, "output_cost_per_token": 1.5e-06, "litellm_provider": "mistral", "mode": "chat", @@ -4579,9 +4579,9 @@ "output_cost_per_token": 4e-06, "litellm_provider": "xai", "mode": "chat", + "supports_reasoning": true, "supports_function_calling": true, "supports_tool_choice": true, - "supports_reasoning": true, "supports_response_schema": false, "source": "https://x.ai/api#pricing", "supports_web_search": true @@ -4616,21 +4616,6 @@ "source": "https://x.ai/api#pricing", "supports_web_search": true }, - "xai/grok-3-mini-fast-latest": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 4e-06, - "litellm_provider": "xai", - "mode": "chat", - "supports_reasoning": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_response_schema": false, - "source": "https://x.ai/api#pricing", - "supports_web_search": true - }, "xai/grok-vision-beta": { "max_tokens": 8192, "max_input_tokens": 8192, @@ -5812,9 +5797,9 @@ "max_output_tokens": 4028, "litellm_provider": "meta_llama", "mode": "chat", - 
"supports_function_calling": false, + "supports_function_calling": true, "source": "https://llama.developer.meta.com/docs/models", - "supports_tool_choice": false, + "supports_tool_choice": true, "supported_modalities": [ "text", "image" @@ -5829,9 +5814,9 @@ "max_output_tokens": 4028, "litellm_provider": "meta_llama", "mode": "chat", - "supports_function_calling": false, + "supports_function_calling": true, "source": "https://llama.developer.meta.com/docs/models", - "supports_tool_choice": false, + "supports_tool_choice": true, "supported_modalities": [ "text", "image" @@ -5846,9 +5831,9 @@ "max_output_tokens": 4028, "litellm_provider": "meta_llama", "mode": "chat", - "supports_function_calling": false, + "supports_function_calling": true, "source": "https://llama.developer.meta.com/docs/models", - "supports_tool_choice": false, + "supports_tool_choice": true, "supported_modalities": [ "text" ], @@ -5862,9 +5847,9 @@ "max_output_tokens": 4028, "litellm_provider": "meta_llama", "mode": "chat", - "supports_function_calling": false, + "supports_function_calling": true, "source": "https://llama.developer.meta.com/docs/models", - "supports_tool_choice": false, + "supports_tool_choice": true, "supported_modalities": [ "text" ], @@ -6677,6 +6662,47 @@ "supports_parallel_function_calling": true, "supports_web_search": true }, + "gemini-2.5-pro": { + "max_tokens": 65535, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 1.25e-06, + "input_cost_per_token_above_200k_tokens": 2.5e-06, + "output_cost_per_token": 1e-05, + "output_cost_per_token_above_200k_tokens": 1.5e-05, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_audio_input": 
true, + "supports_video_input": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_reasoning": true, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_web_search": true + }, "gemini/gemini-2.5-pro-exp-03-25": { "max_tokens": 65535, "max_input_tokens": 1048576, @@ -6719,6 +6745,137 @@ "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", "supports_web_search": true }, + "gemini/gemini-2.5-pro": { + "max_tokens": 65535, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 1.25e-06, + "input_cost_per_token_above_200k_tokens": 2.5e-06, + "output_cost_per_token": 1e-05, + "output_cost_per_token_above_200k_tokens": 1.5e-05, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 2000, + "tpm": 800000, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_audio_input": true, + "supports_video_input": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_reasoning": true, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_web_search": true + }, + "gemini/gemini-2.5-flash": { + "max_tokens": 65535, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_images_per_prompt": 3000, + 
"max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 1e-06, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 2.5e-06, + "output_cost_per_reasoning_token": 2.5e-06, + "litellm_provider": "gemini", + "mode": "chat", + "supports_reasoning": true, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + "supports_tool_choice": true, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supports_parallel_function_calling": true, + "supports_web_search": true, + "supports_url_context": true, + "tpm": 8000000, + "rpm": 100000, + "supports_pdf_input": true + }, + "gemini-2.5-flash": { + "max_tokens": 65535, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 1e-06, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 2.5e-06, + "output_cost_per_reasoning_token": 2.5e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_reasoning": true, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + "supports_tool_choice": true, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ 
+ "text" + ], + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supports_parallel_function_calling": true, + "supports_web_search": true, + "supports_url_context": true, + "supports_pdf_input": true + }, "gemini/gemini-2.5-flash-preview-tts": { "max_tokens": 65535, "max_input_tokens": 1048576, @@ -6768,9 +6925,9 @@ "max_audio_per_prompt": 1, "max_pdf_size_mb": 30, "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "output_cost_per_reasoning_token": 3.5e-06, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 2.5e-06, + "output_cost_per_reasoning_token": 2.5e-06, "litellm_provider": "gemini", "mode": "chat", "rpm": 10, @@ -6797,7 +6954,8 @@ ], "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", "supports_web_search": true, - "supports_url_context": true + "supports_url_context": true, + "supports_pdf_input": true }, "gemini/gemini-2.5-flash-preview-04-17": { "max_tokens": 65535, @@ -6838,7 +6996,53 @@ "text" ], "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supports_web_search": true + "supports_web_search": true, + "supports_pdf_input": true + }, + "gemini/gemini-2.5-flash-lite-preview-06-17": { + "max_tokens": 65535, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 5e-07, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 4e-07, + "output_cost_per_reasoning_token": 4e-07, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 15, + "tpm": 250000, + "supports_reasoning": true, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + 
"supports_tool_choice": true, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-lite", + "supports_parallel_function_calling": true, + "supports_web_search": true, + "supports_url_context": true, + "supports_pdf_input": true }, "gemini-2.5-flash-preview-05-20": { "max_tokens": 65535, @@ -6851,9 +7055,9 @@ "max_audio_per_prompt": 1, "max_pdf_size_mb": 30, "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "output_cost_per_reasoning_token": 3.5e-06, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 2.5e-06, + "output_cost_per_reasoning_token": 2.5e-06, "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_reasoning": true, @@ -6880,7 +7084,8 @@ "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", "supports_parallel_function_calling": true, "supports_web_search": true, - "supports_url_context": true + "supports_url_context": true, + "supports_pdf_input": true }, "gemini-2.5-flash-preview-04-17": { "max_tokens": 65535, @@ -6921,7 +7126,51 @@ ], "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", "supports_parallel_function_calling": true, - "supports_web_search": true + "supports_web_search": true, + "supports_pdf_input": true + }, + "gemini-2.5-flash-lite-preview-06-17": { + "max_tokens": 65535, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 5e-07, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 4e-07, + "output_cost_per_reasoning_token": 4e-07, + 
"litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_reasoning": true, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + "supports_tool_choice": true, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supports_parallel_function_calling": true, + "supports_web_search": true, + "supports_url_context": true, + "supports_pdf_input": true }, "gemini-2.0-flash": { "max_tokens": 8192, @@ -7067,7 +7316,8 @@ ], "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", "supports_parallel_function_calling": true, - "supports_web_search": true + "supports_web_search": true, + "supports_pdf_input": true }, "gemini-2.5-pro-preview-05-06": { "max_tokens": 65535, @@ -7112,7 +7362,8 @@ ], "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", "supports_parallel_function_calling": true, - "supports_web_search": true + "supports_web_search": true, + "supports_pdf_input": true }, "gemini-2.5-pro-preview-03-25": { "max_tokens": 65535, @@ -7154,7 +7405,8 @@ ], "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", "supports_parallel_function_calling": true, - "supports_web_search": true + "supports_web_search": true, + "supports_pdf_input": true }, "gemini-2.0-flash-preview-image-generation": { "max_tokens": 8192, @@ -7479,7 +7731,8 @@ ], "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", "supports_web_search": true, - "supports_url_context": true + "supports_url_context": true, + "supports_pdf_input": true }, "gemini/gemini-2.5-pro-preview-05-06": { "max_tokens": 65535, @@ -7517,7 
+7770,8 @@ ], "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", "supports_web_search": true, - "supports_url_context": true + "supports_url_context": true, + "supports_pdf_input": true }, "gemini/gemini-2.5-pro-preview-03-25": { "max_tokens": 65535, @@ -7554,7 +7808,8 @@ "text" ], "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", - "supports_web_search": true + "supports_web_search": true, + "supports_pdf_input": true }, "gemini/gemini-2.0-flash-exp": { "max_tokens": 8192, @@ -8381,6 +8636,24 @@ "mode": "image_generation", "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" }, + "vertex_ai/imagen-4.0-generate-preview-06-06": { + "output_cost_per_image": 0.04, + "litellm_provider": "vertex_ai-image-models", + "mode": "image_generation", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, + "vertex_ai/imagen-4.0-ultra-generate-preview-06-06": { + "output_cost_per_image": 0.06, + "litellm_provider": "vertex_ai-image-models", + "mode": "image_generation", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, + "vertex_ai/imagen-4.0-fast-generate-preview-06-06": { + "output_cost_per_image": 0.02, + "litellm_provider": "vertex_ai-image-models", + "mode": "image_generation", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, "vertex_ai/imagen-3.0-generate-002": { "output_cost_per_image": 0.04, "litellm_provider": "vertex_ai-image-models", @@ -9416,6 +9689,21 @@ "mode": "chat", "supports_tool_choice": true }, + "openrouter/deepseek/deepseek-r1-0528": { + "max_tokens": 8192, + "max_input_tokens": 65336, + "max_output_tokens": 8192, + "input_cost_per_token": 5e-07, + "input_cost_per_token_cache_hit": 1.4e-07, + "output_cost_per_token": 2.15e-06, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_reasoning": true, + 
"supports_tool_choice": true, + "supports_prompt_caching": true + }, "openrouter/deepseek/deepseek-r1": { "max_tokens": 8192, "max_input_tokens": 65336, @@ -9461,6 +9749,28 @@ "mode": "chat", "supports_tool_choice": true }, + "openrouter/google/gemini-2.5-pro": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 7e-07, + "input_cost_per_token": 1.25e-06, + "output_cost_per_token": 1e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "supports_tool_choice": true + }, "openrouter/google/gemini-pro-1.5": { "max_tokens": 8192, "max_input_tokens": 1000000, @@ -9496,6 +9806,28 @@ "supports_audio_output": true, "supports_tool_choice": true }, + "openrouter/google/gemini-2.5-flash": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 7e-07, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 2.5e-06, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "supports_tool_choice": true + }, "openrouter/mistralai/mixtral-8x22b-instruct": { "max_tokens": 65536, "input_cost_per_token": 6.5e-07, @@ -9638,6 +9970,23 @@ "supports_vision": true, "supports_tool_choice": true }, + "openrouter/anthropic/claude-sonnet-4": { + "supports_computer_use": true, + 
"max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "input_cost_per_image": 0.0048, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_reasoning": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_tool_choice": true + }, "openrouter/mistralai/mistral-large": { "max_tokens": 32000, "input_cost_per_token": 8e-06, @@ -10566,6 +10915,46 @@ "supports_response_schema": true, "source": "https://aws.amazon.com/bedrock/pricing/" }, + "apac.amazon.nova-micro-v1:0": { + "max_tokens": 10000, + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "input_cost_per_token": 3.7e-08, + "output_cost_per_token": 1.48e-07, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "apac.amazon.nova-lite-v1:0": { + "max_tokens": 10000, + "max_input_tokens": 128000, + "max_output_tokens": 10000, + "input_cost_per_token": 6.3e-08, + "output_cost_per_token": 2.52e-07, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "apac.amazon.nova-pro-v1:0": { + "max_tokens": 10000, + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "input_cost_per_token": 8.4e-07, + "output_cost_per_token": 3.36e-06, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, "us.amazon.nova-premier-v1:0": { "max_tokens": 10000, "max_input_tokens": 1000000, @@ -11051,6 +11440,93 @@ "supports_reasoning": true, 
"supports_computer_use": true }, + "apac.anthropic.claude-3-haiku-20240307-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true + }, + "apac.anthropic.claude-3-sonnet-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true + }, + "apac.anthropic.claude-3-5-sonnet-20240620-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true + }, + "apac.anthropic.claude-3-5-sonnet-20241022-v2:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "cache_creation_input_token_cost": 3.75e-06, + "cache_read_input_token_cost": 3e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "apac.anthropic.claude-sonnet-4-20250514-v1:0": { + "max_tokens": 64000, + 
"max_input_tokens": 200000, + "max_output_tokens": 64000, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "search_context_cost_per_query": { + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01, + "search_context_size_high": 0.01 + }, + "cache_creation_input_token_cost": 3.75e-06, + "cache_read_input_token_cost": 3e-07, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_reasoning": true, + "supports_computer_use": true + }, "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { "max_tokens": 8192, "max_input_tokens": 200000, @@ -14506,7 +14982,7 @@ }, "deepgram/nova-3": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14520,7 +14996,7 @@ }, "deepgram/nova-3-general": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14534,7 +15010,7 @@ }, "deepgram/nova-3-medical": { "mode": "audio_transcription", - "input_cost_per_second": 0.00008667, + "input_cost_per_second": 8.667e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14548,7 +15024,7 @@ }, "deepgram/nova-2": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14562,7 +15038,7 @@ }, "deepgram/nova-2-general": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + 
"input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14576,7 +15052,7 @@ }, "deepgram/nova-2-meeting": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14590,7 +15066,7 @@ }, "deepgram/nova-2-phonecall": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14604,7 +15080,7 @@ }, "deepgram/nova-2-voicemail": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14618,7 +15094,7 @@ }, "deepgram/nova-2-finance": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14632,7 +15108,7 @@ }, "deepgram/nova-2-conversationalai": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14646,7 +15122,7 @@ }, "deepgram/nova-2-video": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14660,7 +15136,7 @@ }, "deepgram/nova-2-drivethru": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14674,7 +15150,7 @@ }, "deepgram/nova-2-automotive": { "mode": "audio_transcription", - 
"input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14688,7 +15164,7 @@ }, "deepgram/nova-2-atc": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14702,7 +15178,7 @@ }, "deepgram/nova": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14716,7 +15192,7 @@ }, "deepgram/nova-general": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ @@ -14730,7 +15206,7 @@ }, "deepgram/nova-phonecall": { "mode": "audio_transcription", - "input_cost_per_second": 0.00007167, + "input_cost_per_second": 7.167e-05, "output_cost_per_second": 0.0, "litellm_provider": "deepgram", "supported_endpoints": [ diff --git a/public/docs/assets/hierarchy.js b/public/docs/assets/hierarchy.js index 5f6f3a2..13ababe 100644 --- a/public/docs/assets/hierarchy.js +++ b/public/docs/assets/hierarchy.js @@ -1 +1 @@ -window.hierarchyData = "eJyVlV1PgzAUhv/LuT5qC6Wl3C0xMUtmNH5cGS/IVh0Za7HtjInhvxuYW4qL0N1wAQ/Pew6cA99gjfEOiheRIWUSM4k5f0Ww6q1WS18Z7aD4hkx2R11uFRRw1+zPI2wqvYIiyTjCztZQQKW9sm/lUrmrX+py7bc1ICzr0jkowLvVRXfbxRHtLq6remWVhuKFci6Q8lwgFYQjFSl7bRFEFhQw183O3+18s/NPZqMmSznhzy1K5H0NeVDDsyvf1URuz0RktQg5D9wPyjVGuyn9ATv7EZOka4eSJMh89FaV28jkIRzXIWXhDC0Wt8cMmuSHjF6h3NVicfuvdX9i0FFKMmRcIE8pCkmQskR2LaYkHJuZ9mtrmmo5lnyEJvJbBMZFuBV1XW7LMfWemPbylA62TenZfNTbE9NeIUngvTHmvVZj3j0x7ZWDN/s1XuxXTKWUklB5Y83HaKHWfERIEx5Kr5VqHpXajIkPTIScJaF8dj//pKNj1gHnjLhkErvHgl0b/frywfgd5zbu6/wXj9xhnp+OfOTvIGQj0wThJ4sQ+/MJ2Ni0lJ2sR1zagI1Ja9sfdpJmqg==" \ No newline at end of file 
+window.hierarchyData = "eJyVlU1r4zAQhv/LnKdbjT8k2bfCQgmkZNnunkoPJlEbE0dyJWUplPz3Rc4mKJvWVi4+2I+fd0bM2B9gjfEO6ichkEpCzlDKZwSrXjq19K3RDuoP4CxcdbNVUMOiP9xH2LR6BXVWcoSd7aCGVntlX5qlcrf/qG9rv+0AYdk1zkEN3q1uwms3JzQ8XLfdyioN9ROJkiEJQUiiIiSZsec9ghBRATPd7/xi5/ud/2U2arKUC/7aouRQg4wP4bdrXtVE7sAkZAW3jNw/leuNdlP6I3b1EbMitEOsiDIfvVXNNjH5HE7rkEqK4ubzh1MGZfKYMSiUu53PH760Hm6cdZQziYUg5DlHUXGkIq9CizmLj/VO+7U1fbscSz5BE/l7hELEHS26rtk2Y+oDMe3lOT/bNqXvZqPegZj2iir23hvz2qkx74GY9lZl7H0fL/Y9pVIiivf93pq30UKteUuQZiKehu9K9Y9KbcbERyZBXuRVPGo/Zn9odMwCcM2IVyXHcCwY2hjWV5Tss+FO+zr/jyfusPhk5BN/BzGbmlbRxSKk/nwiNjFNZuxiPdLSztiUtP3+L3ZlZn4=" \ No newline at end of file diff --git a/public/docs/assets/navigation.js b/public/docs/assets/navigation.js index 1d002d5..1f355a1 100644 --- a/public/docs/assets/navigation.js +++ b/public/docs/assets/navigation.js @@ -1 +1 @@ -window.navigationData = "eJyNl11v2jAUhv+Lr9E6WNtt3HWtOiElgxWqXVS98JIDZDi26zgMNPW/T/mA2Ng5zq3f8zyx4+MYXv4RDQdNpkRSVYAqyIhIqrdkSnKRlgyKqzb4sNU5IyOyy3hKptcjkmwzlirgZPpyliQihW9MJLtOsy55ojPBO9G5yFbeXr+PzqI/heCYo8oRPKdql4q/qOJUg2gOddBrOOTMgV8N/I7rrRIySzpJwmhRQHF1jmzBePLF5Bez/djDVsMY9wAglwA7Fz0lGP1diA0Dl23GUVKJNw+nxBtGRVHsQlEUY0wsUmDPBd145tllmGHOGM2pSzfjKCmB3808ZD2OkQcfdsCZc6PMZd18nSDjGtSaJmY3tUW2cHJz62wvZrMqwqqVEAzxVDEmmXFZ6nmpZalXYgc9c3KqMGUUxbMT6rWZBZgohsLuMcPRZhje9BL2qq0KVFU3F6oyK8Kq3l3rYlyCzSQ4h8XlZWPgC9994+A6o2ypFdD8CQopeOHfJW8lpkZtQwTNozK+qV7hPWX+t+xUhZXozIYvsHfjQ1uOrmfIMp57z5Lna22jvxSVEtLeuRv5QE3vSi5qMF19ETsnQR/l6ZL2n4SPXz+PbyYX92aPxohClvoTecnXgyEyiuIlqH2WgPN8IwpZ2g/iveAauDMROx3oehLmDxFLVEVBS/VLwOGrwUHkim688IpuBvF1V6+O0l2ClYZczffQPf2Ny05Drp8lZZk+PmZMg7pUWWHI1HbFD5o7UzKikCWFNS2Z0Sx7qjL6u/rb0Ua24dPk/fU/MkNA7w==" \ No newline at end of file +window.navigationData = 
"eJyNl11v2jAUhv+Lr9E6WNtt3HWdNiElg7VUu6h64SUHyHBs13EYaOp/n/JBYmPnOLd+z/PEjk9seP5HNBw1mRNJVQGqIBMiqd6ROclFWjIortrg3U7njEzIPuMpmV9PSLLLWKqAk/lzJ0lECl+YSPa9ZlPyRGeC96KuyFbeXr9NOtGfQnDMUeUInlO1T8VfVHGuQTTHOhg0HHPmwC8Gfsf1TgmZJb0kYbQooLjqIlswnX0y+dXiMPWw1TDKaU2TXQ5ce+AuwwxfAeQjwN7lzwlGfxdiy8Blm3GUVOLVwynxilFRFLtQFMUYE4sU2FNBt5559hlmWDJGc+rSzThKSuB3Cw9Zj2Pk0YcdcaZrtaWs27cXZFyD2tDE7Me2yBbObm6d7cVsVkVYtRaCIZ4qxiQLLku9LLUs9VrsYWBOThWmjKJ4cUa9NrMAE8VQ2D1mONoMw5tewl61VYGq6uZCVWZFWDW4a32MS7CZBOewuryuDHzlu7EcXGeUPWoFNH+AQgpe+HfJW4mpUdsYQfOojG+rV3hPmf8tO1VhJTqz8Qsc3PjQlqPrGbOMp8FvyXNa2+gvRaWEdHDuRj5SM7iSixpMV1/lzpegT/J8zfu/hPefP05vZt4bf32S4JisNOSqbtuBKRlRyFIft5d8PRgioyh+BHXIEnCeb0QhS3u43guurd9BjchOR7oeBHNerREFLdWvCoevBkeRa7r1wmu6HcXXX4ivO+w05GrOVvckaVx2GnL9LCnL9OlbxjSoS5UVhkxtV/yguTMlIwpZUtjQkhnNcqAqo7+rP0FtZBs+zN5e/gPjynDP" \ No newline at end of file diff --git a/public/docs/assets/search.js b/public/docs/assets/search.js index 34cafe6..840f971 100644 --- a/public/docs/assets/search.js +++ b/public/docs/assets/search.js @@ -1 +1 @@ -window.searchData = 
"eJzFnV2TI7expv9L67aPTHyTutOxvWcVK9lnLXk3NhSODqqbM8Oj7mabZEujcPi/bxDFLAJZb+KjSO9eSTFdhTdZlQAy8wEK/7jb73493H314z/uft6+Pt19Ze/vXtcvm7uv7t7W+8Nmf7i7v3vfP999dfeye3p/3hx+d/73Lz8dX57v7u8en9eHw+Zw99Xd3T/vqRV/aeZx97T59+fd489jQx/eXx+P293rpanxGtDo/cmQzesxsQjq/Ndh91qQOP35itZf1vufn3a/lhTokitUPscbJYHPL8+9bSu9HBv/+vX4ab972z6OEudWfjf+pfhOndLJS309HPfvj8fdvtLaF/ml2PCLaYnpC315MD+tD5u/7p9rWqfL3vf5Lzgenv5te/i37eunzX573Dx16W4+HzevT/GmonBy3Y2UM3cWVM/X3EjxZf354fhp+/rz9vXjw3H38+b1UDMA33JLe9rNuLX65nBYf9zUtS/X3Up597SpejpddEvNv55+SJPw+/nKG6nv3uJAV5O+XHYj3WG8rMmOV91I9e/v6+ft8bf/tn0+1sXPF3+gi29kw2Gz/2X7WH3Zl8tupXvcb9YvVVm66kaqx83L22a/Pr7vq784v/RW+qchsqp8vuhWmrvdc7VH0UU30vz6P795+F9//Mv33/z5TzXl9dv24ZfN/rBlM1iX3h/++N++/uu3Pzz8+9ff//Hhr3/5tib6tPmwfn8+PpzChAceJ8xS/u7Pf/hjs+x0zO7S3B7+fbPeb/Zfvx8/1SS3h5/itevh2hu93+3h293jujo3bQ/P58tuOk5Xvfly2f+XUfLfVNe71V4re9Fav23/x+a3hm7zc7xq/i9kuo+f1seGyPp0WW+PYUp93sOe5pW/8vn55b9v1k8NTvT8/PJpvHL2b31+fvlzW1Dz/Pxyi7iG6ceh5tDwXocLr3yzff3z2je7sJecdv3Tbn+sdprzRbfSfHr6bgj6q8JPTy/jlbdSPxy2h+P6tf6rkwtvpH0aBFoGitspftgcHz99F320JhwvfaFLb6Uf4+7/OQTh37UkZsMd57C9f8bP1D9ujr9vG54/nn78dIS+6rd/3Bz/MEQu3zVnhx83x3O0c6M8kVvU5gsfN8dbe8Ko3fY28Lh6rQWpI7Y8hNQNb/gs4kD++93rcVMfhuK1j+O1M3tCqvj7T+/1bCqVfTzfcI32d43loHjxTWpCwIaW8Wcw4LpxJ7bRGL/Ea1EE06/4w7mS2CR5vFx8C812pyLhW3jVD6eiZdPAOkifLp+OqXN0G4oDZ0leIZip1vGAT5ff4um2P9gbz1P7zYf95vCpfdo83/CvmDP3m8Pb7vXQYMJ43Y2UD5vXKjc5X3Mrxd8Ox029ukhX3Uj12DhswRHrOuXd7vn36+fqnHC67nG47kbK7w118/drq+aZ4i+b/fbDb7/fvb5uIhatqQ/XP6bXX1EVSjHqf37zi5qqn/71Nvh0bKkNnUZzerHpRaMbmZb0ZFx6EexHpSVFjEkvan2ItKTUhEcvwleh0aodbfK3VBXj30SzO+wtKuJoN5HrQ6BVLWGmZoKd03NJVcSeF8nu0mBJT8KdF7le1FlSq2DOi+hMxFnSFgv3F9VutFnUE7BmIteJNEtqRZx5kZyFMou6GGMmin0Is6iFM5REqw9dlrTqGPEi244QWxQFfDiVqyT0Ja0yNrxIiciwQ0kAPKlIJxos6f2PP/6fh7/88evv//ynb/70Hw+///Offvjjn34oqf+8+e1hv1kfdq+nSblalKqPpUUP7caRtxnRShiSKTQiyItEL34s64noMYmAa9ixrNDukT1QqqxZQo0XWQEzXqcrluwy3WtjinbUyEKnMmYsq7T3t2veZANaTDpDH1YsahWQYiI4AycWVWWUmIj2Y8SSJkaIeYe/jVIRHV4EZ2HDom4DMkzkO3BhSbWECi9qczBhRbUBEWb6V+DBiiX1dz0DCzZp1p/6DBxYUa6gwEx9JgYsWVBGgGx
Q7o20etEflqsghKqmjPyY3tV1j2bUx4XnjxdlxMd0qnivqiSjPSZVx3rNWm3O0ojz6qoFlMclGzBeg14lQW7Bd20qjQ+yAdtV9doe4A3njQZUdxG/AtOVbZAQXSrdi+dKihjNpalkD5YrKglILtHqxHElNRnFsSJVF4YrKooILi9V9eG3kiJGbxe1PuxWUqojt4vqfNzGqx4JavvDZvP2/WYzHXvoDzcBblljTcxttKsTu+VKveStpirCt1y2m7/VdCGCyzW7KFxNrwXE5fLXsLgma5qNuLG2FKEy5d74tKoLo1Mm2kXnmhTx/Axk++bmmraE6XLh3qpaTVWAdbloJ6+raZaRXS49j9rVLJDK3Ll2L7urqmJ8x0T7CF5NswTxcuE5HK+qDlEe0+2ieVVFmK8wxS6mV1OsYr1cvJnstepiuIdFy5l8TbGI+HLBWRsD6/oYs3DpPvZXU23Df7kNdQJ4/Zhd8/JeLnjTMbNAB4FOGyDMhToZYV1VwoQsbu+sa9d1u7y6gzrVlQsIMRefQRGb1KXC4ET9BlFPM04EgV5XRb2u3dV/r3zjddTIulUXbawqysCRyfYzx6q2iB2ZdDd5rClD+DgdRm6mV0KQuewcCllVr4NIZkSZRV5jSwFP5jbMIJQN2nVIObFiPqdssKfJJ/ppZbNy03u4zQjbhS0nNswjlzU7ivASDPY3iBM7kaZsxJSwXG2JCDqBFbeoJbXiTiR/01GpCEGB+i2inXY0CgzoZhddFjS7osxMr7dFxqjIEExSb2BFvVgh8NXbaLe/Ckxdr7ai+RXc9unXcWxuwnwiW7dEgLLcgE4uW9OFaJan8h10tqqHAS1T7GO0NU0R04IC5C1HOxHWTsuQXby2pguRba7ZRW1relVwm2vPZreoMpXg2//Y7T4+TzvQ8M83QbdJU03g9mxRJ7ZNVXqhbVlRRLapZDewLWtCXJvqdcHaslYLqk2lrwG1DZY0GnBTXSm2zlR7g+qKJoymM8GuMLpBDc/VE8m+KbqsK2HZVLQ3YC8rCkg2FewEsmW9Mo5NZefB2LK6hBVS3V4QW1HEGDYT7IOwZb0Sgk1F5wDYijLEr5lmF3ytqMFsJlPrSmPKalXsmgo3Q9c2TYxckWAZuJbVirg1FZsFW2vaGErlsn2gtWXUK/tPL9K82chTwJkTjTaYmYp0osyaogQysyi2suGxptHhHx0Aq6ZaAJapcMO3VRuUpIIdU7p67m8Gk5Mwp7jRsabT0d+ueoN1AJl1hS78WFGT4WMm2Y8eK7oieMxku7FjWRVCR97lb6RVAo6p5BzcWFGuw8bMgPZtj2XdAlhM9QSs2KVTh4hMcT5CrNrS8I778WGjasOznjMKdoFCpjcPE5ZtKELCyWBc2+LYroVL/5JgX+m/wQoRBk4suL5q0QoCp9JXjBhF6DdRqm19bNASAd9ErLr5sUOt0Y3mobwWO2SQNzWiviGySbGW5jZsiWzVaX28/XiuwYLGx3rLOacO5lL5+ViuZoUA5XLxTiRX1oRALk9FO3BcRQvDuEytD8WV9UQQNylF9WC4iqYE4XhBqgvBlTUhgEv1uvBbWasK31Ld2ehtWkFJwdt+9/ep7H7399tAN2qoDbmdbOkFbqNCN24rqMmwbZTrR20FPQzaRq0+zFbQaYJso+xViK1mRZP4DTXFWPWi2B2klvRwdHoR68NqNSVhns3lOqfXgqYI1EbB7pJaQU2CaaNYL0oraFVA2ig5E6MVlMVS9qjZjdBKagJAu4h14rOCVhGejYKz0FlJFYOzi14fNisp4WziotSHzApKdWA2irbjsgY9AZZNxCrJdkGpDMpGoXmYrKgrQJBEshORFdTa9iFetPu+QlofQ0te2g3mbjGWlaBc3n4jkhsFeoFcUU3EcZf4tnMnSlmv2St7sE5RsYTlRtEZewirqmJ5LVW9Mppox3N5wNS1o6Ws2dz/rnijDZju0j36IF1JqYDoLnIzAF1JU8ZzF8l+OFdQxGg
u6/430SliuVFuFpQrqTYguYt4B5AraJZw3Kg1Y49fRbMBzaXqV4C5sh3VdzwDyrUoVp/39SNfH55LtWfCuYJ+Gc3lg2/v3r1WXQEAQPFOAFCzQEZ0ufq1tY9mPMdkr6mCdKC6XLUK6mo6MqbLhfrL4a3KTS41E9hVbSjgOmZAA6yrq5WT6xl769o02x7xDGhXU296tLebixpw3Sh9BawrWiChukS4F9QV9DCmS1LTHkhX0hEQ3UWpE9AVtGQ8lxe8bjUayWguK3v1gbmCHsZyo1YflCvo1JHcqDkfyLHKSYLjvv32u4nit99+dxMYR+00sbiTIZ0obmwfkbjGtkXwNjYOuVtj6xCzjS1PKFtjqy1QbRSpMbUezRap+QpS5HhpH8WMra3D6PDSdDGrrLWLZ7G88XJUUlCQ0NfYfC26LLQtgK6xacC5GlsuY61RQKZajTpS4XdUQAyrtW2MrC5NT4lVY8slQDU2L/GpVg2Ioy6tT2hUa7swPr60W1zCVmi3ippGiWbS1KCGQdNEavb4UMRMo0zT+XZFFVy+TwSmTKlrjCi8ccRxbtx7C9Qmb70N2ozNA2bT3LaEaC5RT2VRf7H11jc698kU8Mso0bAlqqYhFUhSjb75qxmt5NNvccF/UaHV/9vfRB2bXBx0Qk1a25UhyaVxzEhaFUQkchGARKSxfQhAss41p9US7hgbl2hHq0Ydblyk2tlGQbGANkalhm1GZYU6yEi1yhyjXbX2rjC16G2/9uS6x5AuJJEqyUSiUa0IIPJBq7a+o1EFFy6hVPkMppqeCBdyrc48sRUlMJG5fbUIDnKNzpmxHRvkMtWtPa06LY7QdqZZVVFGAkyuTgTqWsWEp2HjTpNC08Orn2NW02p5aLOfV720PwqVK/vNekIhP5EBdfzG1mHZPklNXltrgi1F+ku70xp9Y8tiST5P+mf2dLEAn6X+k/p7Y+uw3D62PKm2N7ZaLa6PCsXaeiGbTUrpBa+//OkmhXXWXFN9PbGuMxnnarWKWl2pWurhks0VH6gNQ76qJkV9V2i94HCPK1VivbrOeSytCl2um6u0fnr6/fvhuJuOX1xr/fT0SFfOVXt83qz3jXrx2qsVP6xPnWgLQkyul145Ww1XxydS5ep4k8+3uPt1Cl+DqQGIlOaHJp1//+37xoHq4+b402+zRitRs3noGIWvHUNOqX1bDzgl+Nf6/6hW7wCj3P/Dkevf9FVaL7tfNo0Pc7h4zvNMQ4I/Pz+vX9YTseGfbxIKJE01hQFnizpJe6rSu+21rCjy91Sye+trWfO/EJVP9f6rZ/trWauF1afS12yBbbCk0YCb6kpFm0y1dzloRRMO05lg10LQBjUc+E8k+xaxlXWllQGpaO9GlrKisF4gFezcGlvWK68iSGXnbY8tq0sJUarbu0W2oohXHGSCfdtky3qldQip6JytshVluDoh0+zaLltRgyW8TK1rQW9ZrZrepsLNqW2bJl7PgATLgWlZrbiqIRWbtX22po3JeC5bXu7QMsqV/aV38+rNRpoCZp5otK2GSEU6N7HWFKU1ElnUWoGSNY0Of+jY6lhTLayfSIUbllA0KEm0iCldPdc3r62YhDVFNNqk0yRzjUpHr77KT+qrO7IO17UttqImr/nIJPu3xlZ0xZUgmWz39tiyKlwfwgeWG2ldVo0UFS/rRjq8tHV1ChbqGVc616hkgvOP3yzbUVi5kurP2JZb1a2vZ2EWzN+aW7Wl4Z33b89tVG149v1bdKva5RUxTH/eNt2yDcV1MpNJobZUpl0Lc35JsAz7G1TFNTMTxetrMK0raabSV4yZxfU0E6XakpoGLXFVzUSsits71BrdZt4W3BY75BU3UyPqi26aFGtJe8PSm1ad1sfbv/22wYLGx3rLOae+UieVn78Nt2aFsH4nF+/cilvWhKt68kS7YztuRQuv9cnU+rbklvXEFUCTwlrPttyKprQuiJfXurbmljXhaqFUr2t7blmruoYo1W1eRjS
tB6XY8G3z+vU3U6H4z7fBhpem2rDhYFEvNkxUurFhUVHGholkPzYsamJsmOj1YcOiVhM2TKSvwoZ1SxoNuKmuGLemqt0ha1kTR6upYF/qW1cT5l4u2TnpFnVFbJiIdpcSi4oSNkwEe7FhUa+CDRPZmdiwqC4W8xPdbmxYVhSwYSrYiQ2LekVsmIjOwoZlZYwNU80+bFhWwxlIqtaHDYtqdWyYCLdjwyZNARsCwUqqXlQrY8NErGk/dE1LwEKZTN+XdltGubK/dGPDW400JWzINRqxYSLSiw0riiI2TKPWGjasaHT4Rw8OqqiWsGEiPOO7tw3KYpEsV7567m/HiDzMKWPEik5H/7vqjTYAvrRr9AG+sloB8KWSMwBfWVcGfKlsP+ArqmLAx4aAG2kVwVsiOec7uBXlBhCXGtC+X7ysWwJvid4c8FbTbQBvuQVXgLeaLQ3vfAZ4a1NtePYzwFtNuwLecv2Z4K1oQxm88cG6Ct6atYQSvyBYAW91VRm8ccXrqxjN4G0ifcUIUgZvXKkK3upaMnjjYnXw1q7W6DZt+9pbdAugbSLaANpaFGtpbgtoa9RpfZz1ne4Nio2P8ZZzSgNYS+SvAGsVKySwlon3grWiJgZrWSraA9bKWgJYS9U6wVpRTwZrvPTUBdbKmiJYYwWoPrBW1MRgLdHrA2tFrTpYS3Rnf/t2WkFJQNtnQNk+3wixfR7TlAa+9rkfro3t95K1gpaI1UaxbqZWUINAbVTqomkFlRaUNopew9FqNrRI305Rij4ver1RZ0kNRpsXqS5qVtPBs2ou1jeXFhQlWDbK9VbLCloCJvucxSrto29BqQzIRsF5dKygKxWsP19ChD4uVtLCUOwi1UfECkolHDbKzWFhJU0Iwj5nochNdGBu8DkNP27i+1X4NUo2k68GNYy9JlLlLLmgUwReo8ysTXJFVYw2EsE+7lXQajtgclSuny951dhZ8NRe8naDUazA3PLW24Db2HwnbStqSajtEr12Vn+Laq2e2cFninoF3DZKzmBtNU2pKJZqXhc3NCO2PCzqqiQXFVv72/w3Wcdslw7RxdhKOjJgu4j107WSoojWLoLdXK2gB6Fa1tlvoVLCaaPYHJZW0qyDtIv0/O1sBQsKSG1UnsHTyop1mJZqzydpZStq77qfobXo1Z701WNeFzpLledxs4J6EZrlg+51UVUnPoPSfRtkavoiSMu1r6xntCI0JnqrsaMI03LNKyOHdqyWy3ZXt1t1Wxxr3ra2qgUyamPymLNdp11MrGecMtmk2PSw+ze51bRbHvLNnm8dwY3C8/lbUV+Ab4lsJ3krqEHslqSiHcytpIKB20Wnj7YVlETUlhe3bjQSiZAtK3F1EbaCGsRro1IXWyuoVMHaqDibqjF150fxr1+Pn/a7t+0jn8y2r8fN/sP6cXP4Hb+myNuy0gsrjdTa7C2VTIxvZHJVO3oZXashE2BXtaQb4LWaktG8qhlddK/VhBLqq1p0DfrrMnCOXf8ic3hkXTemN8huNiULtut2dMXdrUYwAFe1ohPItZqB6VzVmnm0rtUoXvSumtOL8poNyble3Y4+ztdqBoJ+VVvmQMBmgzIiWDelixB2GZEGb212lJZqNktnCVVdtyu3KhiRBj//sdt9fJ6k8Ykt2QW3CHumDfbGPLnN/QEPsKA32mky4VxbanysXySXY79qEi3FV0C0O7hqMuLj5vU0aGx3r7/fvX7Yws4FjLnc9ki3XfEkpPAOCHfFdk3ijYEdsOWaqK7dtG6L/hWGFII5ZEZvJNdmhBTGIQu6YrgmeTmAA/qd0VuTAdXQDdgxL25rMqcQtAFDeiO2NhPEcA1Z0BertRkQK1MP2/Mi3G3zMAZvvGIErUSMwII54WKbKVKsiIzoChTb5KVQDcl3xWmS/DRIO1XDZRNOf20Pz542h8f99q3sW2OTX+SXlzwqGimIxv80qJ2vmyvztt6vXzbHdK1HQSy7ukMyfT/fvL69H//8fnx7P/4
gTqyTi9rf1vZ0a2HOxk1/wW7DP25qu7SuOF7TbwW/r9+M9FF/++1335AkMiL9+1WbIkptNm2QyCwVnumYpLXJrpPLrxD9z29+UY2Cb9tfiofEV8X+sNm8fb/ZwKF7qve02bwdhqvnSw7dtU3wI117hdx+9/dGseHK+VKnkzyblJ6fi8exVoUAiCzqtZ2JW5Vlh48VJXd07RVy+UcLy3Jvm9f19io5vgCxqIcW//YKnqPjRsXk6vmSn1uf5+f+h8mI9cftQchYpmrJ1fMl31/7RLPr+2STCY+v7kwkz39qjyMe2Wosoa0val8wIJsEmf0OD7+ZxvmiVoH0kQzDRKFmmV1wi5rltMF5587Mr1kCC+Yda3hF+RDYMPOcw5mVO2BAV+WuSbyxcgdsuf4kxKsqd5JF/wpDCpU7ZMa87/TPrdwhC2YcAFKR5/ssywa0fcV/drEQKM461vDaYiGw45pzDucXC4Eh8w4+nF0sRBbMOQnxuhodsGL+0Yhza3TIiEmNrl9OqskhuRmHJZZrckPyUApC0gtuEoRMGpz3FcsrgpCpBfM+kn5NEDK1YeZX0ytGxOpZowV0reDNLXJizDNV64t5WsRPcUK1vgcsQfdd8QxaQy9syHVfk78u9BIs+lcYUgq9gBnzvtQ2O/QCFsz4AP38OGiqP+s77VfHQVM7rvlwe8WccX96oynp9Vd011L4NRWd9wH5+eEXsGDOF+WvDL+mVsz/xPzs8AsYMeOb87PDMSA/4yP0LeGYhEgvf70ZImVNNiHSxMhORMrVioi0LlNGpFysAZHWJQ/H/faxEE9d5MYr50odf3treojn6zpkco8rhP43DPrL4X5u8vzAvhLSd8iUg/da2N4hJEfMpVi5Q6A5HO0IRHvlO1SvEyuGlbWAskdIDh2LQWOHRCk8LAeGHSINIWBT8NchWYy3KpFWj0whpipHUx0i1bipIWLqkZNjo/ai1Oz4pxj5lCXSGec/ZYB8/lNxxmFNHbfr5+/jO/wL3/2bNzy9sIc4vrw9b46dTX+R3IYfFLa/v1JfsKFWse8yodB1CybUOnKfCWK3LllQ7uRdBoh9sKBf7pE1+dTjS04+x69Fks49uYzSaw+tNC/nStWJ+Ypu0tczrugMff7f5HFCWQT4WakgUpXa7Z4fTpvuG55fdu08uXdpTUiuVF6SVekxQ7favn78gX9xINGbXNSxqhWmCLjBL7ZymjC1s7eiL2kWq/rNslJOL6gWU/uS6PTdlca8+TO6OPLhmbzv005tU0lpVERWdFfBrw4sWiOK640ojKatMcX1RpRGWnFe7/kgSqMZxVEYGoLH4utNEUdoZEXfh3ya4h2pFHrTImhf+bNUsxv2KhweP23wAuCLEruyX0oajxurq4WSYGmivNX82Dwtzp4NeybBuXNfx5RXmenEVep/7VsbO7jV4+4An8mlsS+yC7HJfy2tjq1t5plIdVLkovhz9jFjrDr90nZz8+e1AA1PMb9yvlTTc5SXKMx+kMfdcf3c8kOzC2f8zuH+pp/JLm0TS/vS/96v3942T9LUkfy5vV99eH8Vt7DyBr9ILsbWpxZ20p+JWBH/YCHhYUljPrvk5g/tMoL2PDg+mC5WQbnLZri4PYzDrdOzOvwu/VO51MjaPO2Rwk0mf+lq8Zts9hraiv/W1cq33373Pd8kM7SV/KWrxfPOBf7d26HR/I9z2v1LurUia/T0l74WMxBzbuv0b/2t/LD+iBr6Yf2xv604Pv2Q9uGkxfGPc+b1h7fNfhgd601/IdwmbFfJDS/wPDj5y1aAW661AM+bZRPalvi129AlfhvV3VODQ31xvuxaJditgJQcZbVpJUFU9NLLOSWtbl5v4Zb2zbXqFrbwMomsXqs6N+q9v73t9sfDw/r9abt72IL5Corj225oyfBw+00Z77uJLRSwxNpLWjhqsAfcexOb3va7l7fjw+P68VOnRZM7b2LPdFlngyn1tZ2dVvyyPaSRaIMJ4x030f9189P
DYbPeP37qsSG76xo7juuPLTPV+bJOJR4BDRh/UrYfZPM/dkVW/xMuVhmazf7WsWTt+Xn36/fbl+3zut7iF/Hqw3g1fky5lSXdv77+/Lr7lTulpPs+Xn2F7nH3xk7REEWPuzdwjEaLIn9v5xTkT2kVbZBN/lJ8Z+bS2PkklbGhX9b77fqn583hd+e/oIb+dn+3fX3afL776h+nr3THjv3Vnf7SfLm6u7/7sN08Px3uvvqRCn2Pu5eXIe952j2+x//92/my/7U5fS7ldPFw9e8Wd/c/Lu6N+3Jlln/72/2PdHP8Q/wHauPyL/FGdXf/o7r3+kvnbXajmtyoshv13f2P+t6svtQqu09P7tPZfebu/keDBM3kRpPdaO/uf7ToRju50WY3urv7H929C19a57Mb3eRGl93o7+5/9PdWf6lWLrvRT2702Y3h7v7HcHo2yxCyG8PkxpDduLy7/3GJblxOblxmN66Et7Ga3LfKX//JG1ZIUU09RzHXib6zAKoKOE/uPerkFErdm+WXbpk/XDX1IJW7kDo5htLw5qkXqdyN1Mk5lEGvVU09SeWupE4Ooix8XlNvUrk7qZOTKAdvnnqUyl1KnRxFefSwp06lcq9SJ19R4d6YL5XO7536lcodS538RS2R7tS3VO5c+uQvCnqXnnqXzr1LnxxGI+/SU+/SbGyKg5OCwmB8yr1Ln/xFazTS6Kl36dy79MlftLm35ku1yL1LT71L596lT/6i7b21X6pFPlTpqXfp3Lv0yV+0gzdPvUvn3qVPDqP9vQlfWsue9tS9dO5eeilNPnrqXjp3L72SXFNP3Uvn7mVODqPhOGum7mVy9zLRvZbocZmpf5ncv4wWH5eZ+pdhE2D0rxVUBnNg7l/m5DFmAW+e+pfJ/cucPMYoePPUv0zuX8ZLb9lM3cvk7mVODmM0FJ66l8ndy5w8xhh489S/TO5f5uQyBnYpM3UwkzuYPbmMgV3KTh3M5g5mTy5jPLx56mA2dzB7chkT4M1TB7O5g9kYYkHXtlMHsyzKig4GvdOCQCt3MHtyGQu9004dzOYOZk8+Y6F32qmH2dzD7MlnLPQwO/Uwm3uYPfmMhR5mpx5mcw+zJ5+x0MPs1MNs7mHu5DMWepibepjLPcydfMZCD3NTD3O5h7mTz1joYW7qYS73MHfyGQs9zE09zOUe5mIgDz3MTT3MsVg+BvPQwxwI53MPcyefcdDD3NTDXO5h7uQzDnqYm3qYyz3MnXzGQQ9zUw9zuYe5k8846GFu6mEu9zB/8hnnUEDipx7mcw/zJ59x0MP81MN87mH+5DMOepifepjPPcyffMYt76360vh8hvVTD/O5h/mTz7gV/M1TD/O5h/mTz3joYX7qYZ5ljDFlhB7mQdKYe5g/+YzXKFP1Uw/zuYf5pZisTh3M5w7mV1K+6qf+5XP/CgspZQ1T9wq5ewUlZK1h6lwhd66gpbw1TH0r5L4VjJS3hqlrhdy1ghXz1jB1rZC7VnBi3hqmrhVy1wpezFvD1LUCK0gEMW8NoCaRu1ZYinlrmPpWyH0rrKS8NUx9K+S+tVxIycFy6lvL3LeWSspbl1PnWubOtdRi3rqcetcy966lkfLW5dS7lrl3La2Yty6n3rXMvWvpxNRzOfWuZe5dSy+mnsupdy1z71oGMfVcTr1ryUpeSzGXWoKqV+5dy5PDeHPv1Jcu5FnJcupey9y9VmLRdDV1r1XuXislueZq6l6r3L1WWsxbV1P3WuXutTJi3rqa+tcq96+VFZ/1aupfq9y/Vk7MW1dT/1rl/rXyYt66mvrXKvevVRDz1tXUv1a5f63E6sRq6l4rVlVdiXnrClRWeWl1ISauw9/y25N/O9+vxNx1+Bu/n5VYF1pMX4e/8ftZlXVhxAx2+Bu/nxVaF1ZMYoe/8ftZrXXhxDx2+Bu/n5VbF15MZYe/8ftZxXURxGx2+Bu/n1VdF0sxoR3+xu9nldfFSsxph7/x+5n/xXI9TmsVqu5PyvtKzGw
VrPAz/4tVe5zcKlTk51X+WLjH+a1CdX5e6I+1e5ziKlTq57X+WL7HWa5C1X5e7o8VfJzoKlTw5xX/WMXHua5CRX9e9Y+VfJzuKlT455X/WM3HGa9CxX9W/VexoI+TXgXq/4oBABWL+jjvVYABKAYBVKzr49RXAQygGAdQsbSPs18FSIBiKEDF6j5OgBWAAYrRABUL/DgHVoAHKAYEVKzx4zRYASSgGBNQscyPM2EFqIBiWEDFUj9OhhUgA4qhARXL/d6ifFgBOqAYHlCx4g9TYgX4gGKAQMWaP4zSFCAEiiECFav+MDFWgBEoBgnUQAkQdgOQQDFKoGLhH2NdgAkU4wQqlv4x2QWgQDFSoGL1X4C7ABYoRgtUBAAS3wWOx4CBigxAQLwAGSjGDFTEAALlBdRAMWygIgkQQC8AB4qRAxVhAGa9AB0oxg5UxAEY9wJ4oBg9UBEIYOIL8IFi/EBFJICTZwUIgmIIQUUqAPNnBRiCYhBBRS6AU2gFMIJiHEFFNICzaAVIgmIoQUU6gBNpBWCCYjRBRUCAc2kFeIJiQEFFRoBTPAWQgmJMQUVMgDNqBaiCYlhBRVIA0y0FuIJiYEFFVoB9F5AFxdCCirQAp9YKwAXF6IKKwABn1wrwBcUAg4rMQHr6wPsYY1ARG+AcWwHKoBhmUJEc4DRbAdCgGGlQER7gTFsB1qAYbFBeLKkoQBsUww0qEgScbysAHBQjDipCBCHjBsxBMeigIkcQMm6AHRTjDiqiBCHjBuRBMfSgIk0QMm4AHxSjDyoCBSHjBvxBMQChIlMQMm6AIBRjECpyBSHjBhhCMQ6hIlsQMm6AIhRjESryBSHjBjhCMR6hImQQMm7AJBSDEiqCBiHjBlxCMTChImwQMm7AJhSDEyryBiHjBnhCMT6hInIQMm5AKBRDFCpSByHjBpBCMUqhIngQMm7AKRQDFSqyByHjBqhCMVahIn8QMm6AKxTjFSoyCCHjBshCMWahIocQMm6ALRTjFiqiCCHjBuRCMXShIo4QMm5ALxTDFyoSCSHjBgBDMYKhIpQQMm7AMBSDGCpyCSHjBhhDMY6hIpoQMm5AMhRDGSrSCSHjBjBDMZqhBpyB/Q/wDMWAhoqQQsi4AdNQDGqoCCo8XCutANdQDGyoyCpwxg3IhmJoQ0VagYM3wDYUgxsq8gqccQO6oRjeUAPfgAtdgesxvqEissBZDwAcihEOFaEFzrgB4lCMcajILYSMG2AOxTiHiuhCyLgB6VAMdeiILnDGrQHq0Ax16IgucMatAerQDHXoiC5wxq0B6tAMdeiILmDGrQHp0Ix06Egu8CpWADo0Ax06gguYcWvAOTTjHDpyC2mZNVj6yziHjtwCr7QGmEMzzKEjthAWWwPMoRnm0BFbCEumAebQDHPoiC2EVdMAc2iGOXTEFsLCaYA5NMMcWsmLgTXAHJphDh2xBV4+DSiHZpRDK9n3AOTQDHLoCC1wxqwB5NAMcugILXDGrAHk0AxyaFVYeQ4gh2aQQ0dogTNmDSCHZpBDR2iBM2YNIIfmWxwitMAZs0abHPguhwgt8NtH+xwmGx20mDFruNWBOV9kFjhj1mi3A9/uEJkFzpg12vDAdzxEZoEzZo32PPBND5FZ4IxZo20PfN9DZBY4Y9Zo5wPf+hCZBc6YNdr9wLc/RGaBM2aNdkAwxqEjtMAZswaQQzPIoSO1wBmzBpRDM8qhI7bAGbMGmEMzzKEjuMAZswagQzPQoSO5wBmzBqRDM9KhI7rAGbMGqEMz1KEjusAZswaoQzPUoSO6wBmzBqhDM9ShI7rAGbMGqEMz1KEjusAZswaoQzPUoSO6wBmzBqhDM9ShI7vAGbMGrEMz1qEjvMAZswawQzPYoSO9wBmzBrRDM9qhI73AGbMGtEMz2qEjvsAZswa4QzPcoSO+wBmzBrhDM9yhI77AGbMGuEMz3KEjvsAZswa4QzPcoSO+wBmzBrhDM9yhI77AGbMGuEMz3KEH3IH9D+A
OzXCHjvzCe5Qxa8A7NOMdOgIMmDFrwDs04x068gscvAHcoRnu0BFfwIxZA9qhGe3QA+1AeQNgHZqxDh3ZBc5aAOrQDHXoiC5gxqwB6dCMdOhILnDGrAHp0Ix06IgucMasAerQDHXoiC6EjBmgDs1Qh47oQsiYAerQDHXoiC6EjBmgDs1Qh47oAmfMgHRoRjp0JBc4awGgQzPQoSO4wBkz4ByacQ4duYWQMQPOoRnn0JFb4IwZYA7NMIeO2ELImAHm0Axz6IgthIwZYA7NMIce9l3gSQtgDs0wh47YQsiYAebQDHPoiC2k3cbA9xjm0BFbYEatAebQDHPoiC1wzgUoh2aUQw87MaDvAsihGeTQEVoIGTeAHJpBDh2hhZBxA8ihGeTQEVpITx94H4McOkILIeMGkEMzyKEjtBAybgA5NIMcOkILIeMGkEMzyKGXcr0FMA7NGIeOzELIuAHj0Ixx6MgshIwbMA7NGIeOzELIuAHj0Ixx6MgshIwbMA7NGIeOzELIuAHj0Ixx6MgshIwbMA7NGIeOzELIuAHj0Ixx6MgshIwbMA7NGIeO0ELIuAHk0Axy6EgthIwbUA7NKIeO2ELIuAHm0Axz6AguhIwbgA7NQIeO5ELIuAHp0Ix06IguhIwboA7NUIeO6ELIuAHq0Ax16IguhIwboA7NUIeJ6AJn3AagDsNQh4noAmfcBqAOw1CHiegCZ9wGoA7DUIeJ7AJn3AawDsNYh4nwAmfcBsAOw2CHifQCZ9wG0A7DaIeJ9AJn3AbQDsNoh4n4AmfcBuAOw3CHifgCZ9wG4A7DcIeJ+AJn3AbgDsNwh4n4AmfcBuAOw3CHifgCZ9wG4A7DcIeJ+AJn3AbgDsNwh4n8AmfcBvAOw3iHiQDDB5RxGwA8DAMeJgIMmHEbwDsM4x0m8gv8zRmAOwzDHSbiC5hxG0A7DKMdZqAdIO8wgHUYxjpMZBcw6zEAdRiGOszwOSeU9RhAOgwjHSaiC5xxG4A6DEMdJqILnHEbgDoMQx0moguccRuAOgxDHSaiC5xxG4A6DEMdJqILnHEbgDoMQx0moguYcRtAOgwjHSaSC5i1GAA6DAMdJoILmHEbwDkM4xxm+M4T9j3AOQz/1NPwrSfoe+hbT/xjT8PXnmDGbdDnnvj3noYPPsGM26AvPk0++WTEjNvAjz4x34vYAmfcBn33iX/4KWIL4YNV6NNP/NtPhY8/oa8/8c8/Gdn30Pef+AegIrQQPvSFPgHFvwEVoYXwrS/0FSgGOYyV19QbADkMgxwmQgvhi18AchgGOczwNSjho1/A+xjkMMMHofCUCyCHYZDDRGiB3z5gHIYxDhOZhfD1L8A4DGMcJjIL4QNggHEYxjhMZBbCN8AA4zCMcZjILHDGbADjMIxxmMgscMZsAOMwjHGYyCxwxmwA4zCMcZjILHDGbADjMIxxmMgscMZsAOMwjHGY4WtR2H0B5DAMcpjhg1HCZ+uA/zHKYSK2wBmzAZjDMMxhIrjAGbMBoMMw0GEiucAZswGkwzDSYSK6wBmzAajDMNRhIrrAGbMBqMMw1GEiusAZswGowzDUYSK6EDJmgDoMQx0mogshYwaowzDUYYZvSWH/A6jDMNRhhs9JYf8DrMMw1mEivBAyZgA7DIMdJtILIWMGtMMw2mEivRAyZkA7DKMdJuILIWMGuMMw3GEivhAyZoA7DMMdJuILIWMGuMMw3GEivhAyZoA7DMMdJuILIWMGuMMw3GEivhAyZoA7DMMdZsAd2P8A7jAMd5jhs1M48QG8wzDeYYL4HWwDeIdhvMMEOfgDuMMw3GEG3IFDb4A7DMMdJuILv4RfHAe4wzDcYSK+8Ct4P8AdhuEOE/FFWMB6BcAdhuEOE/FFgJ9oNwB3GIY7TOQXAX7O2ADeYRjvMJFfBPzFdsA7DOMdJvKLAD/cbgDvMIx3mMgvgsP3A/djvMNEfhE8vh/4H+MdJvKLEPD96PuzzP8ivwjY/wDvMIx3mMgvAvY
/wDsM4x0m8ovlAt8P/I/xDhP5heC/gHcYxjtM5BdL7L+AdxjGO8ywqwN+FdEA3mEY7zArud4HcIdhuMOs5HofoB2G0Q6zkut9AHYYBjvMSqz3AdRhGOowK7neB0iHYaTDLsR6nwWgwzLQYRdyvc8C0GEZ6LALud5nAeiwDHTYhVzvswB0WAY67EKu91kAOiwDHXYh1/ssAB2WgQ67EOt9FnAOyziHXYjTrgWYwzLMYRdivc8CymEZ5bALud5nAeWwjHJYJdb7LIAclkEOq+R6nwWQwzLIYZVc77MAclgGOayS630WQA7LIIdVcr3PAshhGeSwSq73WUA5LKMcVon1Pgsoh2WUwyrZ9wDlsIxyWCXX+yzgHJZxDqvkep8FoMMy0GG1XO+zgHRYRjqslut9FpAOy0iH1XK9zwLSYRnpsFqu91lAOiwjHTaSiyUM+SwgHZaRDqvF9VUWgA7LQIcd9nTgb7kD0mEZ6bDDng78OXeAOixDHXbY04E7L2AdlrEOO+zpwJ0XsA7LWIeN8GKJjzoCsMMy2GFN4eQBADssgx3WFA4fALDDMthhTen8AXQAAXM/UziCAMAOy2CHNYVTCADssAx2WFM4iADQDstohzWFswgA7rAMd1hTOI4A4A7LcIc1hRMJAO6w/NQLKx9KYNG5F/zgCyufS2DR0Rf87AsrH01g0ekX/PgLK59OYNEBGJMTMOQDCiw8A4P5n5XPKLDoGAx+DoaVjymw6CQMfhSGlU8qsOgwDH4ahpUPK7DoPAx+IIaVzyuw6EgMxjusk48ssIB3WMY7rJNPLbCAd1jGO6yTDy6wgHdYxjusk88usIB3WMY7rJOPL7CAd1jGO6yTTzCwgHdYxjuskw8xsIB3WMY7bOQXS/gVCQt4h2W8wzrxKAMLcIdluMM68TQDC2iHZbTDevFAAwtgh2Www3rpTAMLUIdlqMN68VgDC0iHZaTDevFkAwtAh2Wgw3r5cAMLQIdloMN6+XwDC0CHZaDDevmIAwtAh2Wgw3r5lAMLQIdloMN6+aADC0CHZaDDevGsAws4h2WcwwbxuAMLMIdlmMMG8cQDCyiHZZTDBvnQAwsoh2WUwwbx3AMLIIdlkMMG+egDCyCHZZDDBvn0Awsgh2WQwwb5AAQLKIdllMMG+QwECzCHZZjDBvkYBAswh2WYw0ZsgXMuQDksoxxWPmrDAshhGeSwEVoIGTuAHJZBDhuhhZCxA8hhGeSwS3lHkQWQwzLIYYeDN3DIByCHZZDDDps68JQLIIdlkMMOmzrwlAsgh2WQw0Zogd8+YByWMQ477OnAESNgHJYxDjvs6RAOUAPexxiHHfZ0CGeoAfdjjMMOezpw5wOMwzLGYSOzEDJmwDgsYxw2MgshYwaMwzLGYSOzEDJmwDgsYxw2QgshYwaQwzLIYYc9Hdh9AeWwjHLYYU8Hdl+AOSzDHHbY04H9D4AOy0CHHfZ0YP8DpMMy0uGGPR34hDWAOhxDHW7Y0yEc4zf1P8dQh4voQjjJD6AOx1CHi+hCOMwPoA7HUIeL6EI4zw+gDsdQh4voQjjSD6AOx1CHG/Z04FP9AOtwjHW4YU8HPtgPwA7HYIcb9nTgs/0A7XCMdrhhTwc+3g/QDsdohxv2dGD/A7jDMdzhhj0dMGN2AHc4hjtcxBc4Y3YAdziGO1zEFzhjdgB3OIY7XMQXOGN2AHc4hjtcxBc4Y3YAdziGO1zkFzhjdoB3OMY7XAQYOGN2AHg4BjxcBBhL+BUJB4CHY8DDRYABM2YHeIdjvMNFfgGDNwdwh2O4w0V8ATNmB2iHY7TDDYd1g7zBAdbhGOtwkV3ArMUB1OEY6nDDpg6UtThAOhwjHW7Y0wEzZgdQh2Ooww17OmDG7ADqcAx1uGFTB8yYHUAdjqEON+zqgBmzA6jDMdThhm0dMGN2AHU4hjpcRBcwY3aAdDhGOlwkFzBrcQB0OAY6XAQXMGN2gHM4xjlc5BY4Y3aAczjGOdywqQP6HsAcjmE
ON+zpgBmzA5jDMczhIrbAGbMDmMMxzOEitsAZswOYwzHM4YZdHThoApjDMczhjPwdAwcwh2OYw0Vsgb8i4QDmcAxzOCt+uc8ByuEY5XDyKR0OQA7HIIeL0AJn3A5ADscgh4vQAmfcDkAOxyCHs/IKAwcgh2OQww3ndAinQAPvY5DDDZs68JQNIIdjkMMNmzrwlA0gh2OQw1mx3uIA43D83O9hTweOONHJ3/zo72FPB4440eHf/PTvYU8H7rzo/G9+AHhkFjjjdugIcH4GeGQWOON26BTwyTHgTsy4HTwInLlfZBY443boLHB+GHhkFjjjdug4cH4e+LCnQzjHHLgfPxJ82NOB3RcdCs4ohxv2dGD/A5jDMczhhj0d2P8A6HAMdLhhTwf2P0A6HCMdLqILIeMGqMMx1OEiuhAyboA6HEMdLqILIeMGqMMx1OEiuhAyboA6HEMdLqILIeMGqMMx1OGGPR3Y/wDqcAx1uGFPB/Y/wDocYx1u2NOB/Q/ADsdghxv2dGD/A7TDMdrhhj0d2P8A7XCMdriIL4SMG+AOx3CHi/hCyLgB7nAMd7iIL4SMG+AOx3CHi/hCyLgB7nAMd7iIL4SMG+AOx3CHi/hCyLgB7nAMd7hhVwf2P8A7HOMdLgKMJVzV7wDwcAx4uALwcAB4OAY83LCrA98O3I/xDhf5BU65Ae5wDHe4AXegxAPADsdgh4vwAqc9gHU4xjpcZBc45QaowzHU4SK7EFJuwDocYx0usgsh5QaswzHW4ZaFlBewDsdYh1vJKS9AHY6hDicfPO4A6XCMdLiVnPIC0OEY6HCrQsoLQIdjoMOt5JQXcA7HOIeL3EIYtQDncIxzuMgthJQZcA7HOIcbdnPAvTwOcA7HOIdbyQubHeAcjnEOtxKXtjiAORzDHD5iiyXcSuQB5vAMc/iFWOjzgHJ4Rjl8pBYruBPJA8rhGeXwkVqgUcsDxuEZ4/CRWUDP9QBxeIY4/LCbA3muB4TDM8Lhh80ccNTygHB4Rjj8sJsDjloeEA7PCIdfyEtbPCAcnhEOvxCXtngAODwDHF6Jyws84Bue8Q2vxKUtHuANz/CGj7hiBbegeYA3PMMbXsmjngd4wzO84ZU46nlANzyjG17JhT4P6IZndMNHWrGCO0A9oBue0Q0facXK3Dv75WLF7Qe+x+iGj7RiZe/t6ksX+P3A9xjd8MNnq+B6Qg/whmd4w0desYI7QD3gG57xDR+BxQp+ccwDwOEZ4PARWayW+H7gfgxx+AFx4GEbMA7PGIeP0ALSIQ8Yh2eMww/HkGPrgfcxxOEjsvBw/6YHiMMzxOGHU8jhKeAeIA7PEIcfTiEX7AfexxCHH04hh2zOA8ThGeLwwynk2HsA4/CMcXgjfyjcA8jhGeTwwznk+PkDyuEZ5fCRWuDVvB5QDs8ohzfix4M8oByeUQ5/PoocRw0Ac3iGOXzEFpiteoA5PMMcfvh21QLPHoBzeMY5/HAa+QKPvwB0eAY6/HAc+QLux/KAdHhGOvxwHvnCwhEcoA7PUIcfDiRfwE34HsAOz2CHH2DHAo/BAHd4hjt8AXd4gDs8wx3eigcleEA7PKMd3ooHJXgAOzyDHd5KByV4gDo8Qx3eigcleEA6PCMd3ooHJXgAOjwDHd7KByV4QDo8Ix3eyQcleEA6PCMdPpILjKg9IB2ekQ4fyYUQ+QLS4Rnp8MOJ5DDyBaDDM9DhI7jAkS/gHJ5xDh+5BY58AebwDHP4iC2EyBVgDs8whx/OI4e+AyiHZ5TDR2ohRK6AcnhGOfxwSMcCluk8wByeYQ4fsYUwbADM4Rnm8BFbCLcD32OUw0dqgYcNADk8gxz+vJ1jiRC1B5TDM8rhvVTn84BxeMY4/LCdY7GCUyeAHJ5BDh+hBXY+wDg8Yxx+2M4BnQ8gDs8Qhx92c+CBCyAOzxCHH3Zz4IELIA7PEIcftnPggQcgDs8Qhx/2c8CBBxAOzwiHH/ZzKBw4AcThGeLwEVngkQsQDs8
Ih4/EAo9cAHB4Bjj88NEq7DwAcHgGOPywnwM6D+AbnvENP2znwCMX4Bue8Q0fBufDUSMAHJ4BDh+BhZB0A8DhGeDwEVgISTcAHJ4BDh+BhZB0A8DhGeDwy8H9cNQLEIdniMNHZqGUwRYAB2SQwy/l77d4gDk8wxw+cgth7gCcwzPO4Zfil4M84ByecQ6/FL8c5AHm8Axz+KX05SAPIIdnkMMvxS8HecA4PGMcfiV+OcgDxuEZ4/Ar+ctBHkAOzyCHX8lfDvKAcnhGOfxAOfDIDSiHZ5TDR2yBR25AOTyjHD5SCzzwAsjhGeTwEVrggRcwDs8Yhx9OIsfvHjAOzxiHHxgHfvfA8xji8JFZCAMvYByeMY4QmQUsNwSAOAJDHOH80Sr48Y0AIEdgkCMMX61SMFUOgHIERjnC8NkqOGwFwDkC4xxBPos8AM4RGOcI8lnkAXCOwDhHWIjTbgCYIzDMEQqHcwSAOQLDHGHAHLjSEQDnCIxzhIWc7QYAOgIDHWEAHfjtAdIRGOkISsS7AZCOwEhHGDZyQEAaAOkIjHSEgXQoWC0LAHUEhjrCcBq5CvAFANgRGOwIalhRDxlrALQjMNoR1EDaYNYSAO4IDHeE4eNVOOsKgHcExjtCgXcEwDsC4x1BDYkHjNwDAB6BAY+gC/W+AIhHYMQjFI7qCIB4BEY8wnBUB+4FgHgERjyCFlOPAIBHYMAjaHk7UQDEIzDiEc67OnAvAMwjMOYRhm0dQi8A0CMw6BGGfR0apg8BUI/AqEeIFAOnDwFQj8CoRxg2dmgYvgeAPQLDHsHI+UcA2CMw7BGMfExRANgjMOwRhr0dwjAAuEdg3COYQvElAPARGPgIkWRITwB4ISMfYSAfghMB8hEY+Qhm2FwEuUEA6CMw9BEiycDgMwDyERj5CAP50DiYAuQjMPIRBvKhcTAFyEdg5CNEkIHRawDgIzDwEQbwoSG3CAB8BAY+wgA+NKxgBgA+AgMfIYIMpfF8CMhHYOQjRJShNO4IgH0Exj6CdXIiHgD9CIx+BDt81gXPZwCABAZAgg2lnwAckSGQMCAQg4dTAEECgyBhgCAGD4eAggRGQcJAQYTRBGCQwDBIGDCIwZ0ZcJDAOEgYOIjBnRGAkMBASBhAiMGdEaCQwFBIGFAITMsDYCGBsZAwsBCDOyOgIYHRkDDQEIM7I8AhgeGQ4IbtlrgzAiASGBAJw6HlBq7lCICIBEZEwpmI4K4AiEhgRCR4sTQTABAJDIiE4dhyi/syQCKBIZHg5dpMAEwkMCYSBiZicVcGTCQwJhKGj1xZ3JUBFgkMiwT56PIAoEhgUCQMH7myuCMDLBIYFgnnr1zhjgzASGBgJAxgxOKODMhIYGQkDGTE4o4I0EhgaCQMaMTijgjYSGBsJAxsxOKOCOhIYHQkDHTE4o4I6EhgdCQMdMThjgD4SGB8JAx8xGFHBoQkMEISBkKCqowBAJLAAEkYTjF3uB8ARBIYIgnDuR4OOzJgJIExkjAwEocdGTCSwBhJCMNSfOzIAJIEBknC8NUrhx0ZUBL6t7/d321ff9nsj5unb16fNp/vvvrxx7v1T7v98e7+H3cP2+HfjL6POndf/ePOhLuv/vHP+7vVaviv8mr4H631+X+Wdvif0/ln8X9OHzYe/me5HP7ntA30q3/885/3ZOXwz2Rf/NvJ4PXT0+P74bh7Sc0JKjFnqVsbetkcDuuPm+yHmbSl8w9aLOiX0Q/Shn6Zo19GP0gH+mUr+mWqzaDn592vh+3L9nm9T006OfJo08lVmxt7f/35dffra96YSRsLbY29Hj/td2/bx7Qld2nn/AxO6yO7mtu9Hbe710Pa6jIkzS4bXeJt+/DLZn/Y7rKfqpPHNryvlpZ+3vyWNZLYY85usDp7gXJnt9bq/AR0OL98Q75z+oD72R3OveK0XX34n/FfVudrPP3ptFhj+J9l4yN92/6iUrt98uN9oPfT6DunxsC7OWUZF9+xy7bGDoft4bh+zYc
PmzxVGjYWinoZdS5Nz3fpqZdR59L0WFcL6mVt3f6n9WHzvn/ORo/kV52fFNlkyKYV2UTv/HT88fA/K+r5jjq8JpuWmt7wit6wpTes6A23vZTHT+v8ESb9z9L4tCDP9PTkNHnmkjzT0pPTK3qE5IeurZecTGGPUC+TF3pujh6ZcmQDPTIdltRNyAZF/zM+suFJ1Y153qz3YEZIJ6hl4+/aPW1+et49/pw5/aWdtmH8cffy9rw5ZrPKCZpdfGzR5qmPu9fDcf/+eNxl04FPOja5EHkr+QGNUNoZ8laap8LotjQVO5qB6R143fjod6/HTd6xT5X5i3nklycUNfyPbhvRzy3nM8MqaXnR1s7TZvN22GyyF+oTC5Uef3LbTPi0+bB+f85/skveSGicss7tPJxGowfel5IZmkZACjMUDYB6saK+tKC+pOkVj3PP+V9OX4QdXrFpG2zIvJfd0ya3LXl4NLTQ4KxoZNFqQbYpss2QSfQ/5IfOtD75w+N+Gyel1KBV8rBOu3OGtxlW5G9tk9Tm83Hz+rR5yvzt0jB5L3UvetJqRcMsDXGGTDgddzX8UJrKTx8HO88KNMvTjOcpLPYUYPpl21P5sD4ND9tN1lNCFsO29eUPm+Pjp8nrXiav25o21760lBllkl5iaVZd0BThaYqgvEBTXnA6Mnh4kDSvWBrdnGv8bdvn4yYbQ0MafzSGWEMrf39fP2+Pv02elEmCCEvusbD08+hXUXKgKTkwbkE/jzorzZvONRr2/vrIu8WpopyEamc7gm3zqo+b181+fWrzcff6Yfsx63CLdBhum8k+brIRM7j06beNSB83x/VzHrX5tJXWH3b86bfDZv/L9jGbn0Ma5S/bxoy0rYk7hDQeGt51S4MgsjLLtAuSY9F0QKG9pnhYUzxsHHWXceyl0er0xZ9Wg6aB1SoN3duGhLGhfEJfpC21BVcfN8fz3BQf+fskfU+sI69XC5qgKFDSNO1rCn2No95nLD0tmjNds29N3MCnbhCa3WA6etrkWdFkoRaBfhf9HAoANQ2RhgLA00lA599FE69r9vPBHuaYNomMqfihKNBTNFdqGrU1DWuGMihrKHGiudK55keUjsP5k0oCfwqWFAVLigISTaOtpk5haAq3VM+ylJY73+jmu93H58wdfTLSKfI13xiUDM2hGkniVW7V6J2xseNul73DVVolUI2jwn739+w3+jQkpPhFN77J/e7vsNKQlrxsW1vbLHw7rY+4NEGZZzBtM9b29e2dJTbJQ/dUoQgU7gUaZENjGTMKPDzuDrlKGvuHxmD90tTD22b/cNz9vMljgXQKCY2j2dDo4fHT5mWdW5i+mca3PDQWDcte8yqZdj2Fx6ExIYiN7t6Pb+9H0HKaJTcmndvDT5v1frNfvx8/ZTlP0okpPKVqiqLoVCsaWQINveNMouh/qDroTNsIsD087x7XeQKW9jbKNc6KFDLTkKYotlSO4k/KCDU5rg5U2KKcyZixpE2hqRqr9vQvY4pJuYyjSo6zbVHrfx1YtTb5XeefQX3s/COoiymamTX9LENFVkNDtqXymiMG4YhBeEo6PM1anqplvjH0+3nz28N+sz7sXrevHx9AGcQn3ZgifeVGD6E4zLY55vNzFoH5NFgl3/KN2dnz88unzfpps8+6S5ot0GOmp6wcuQo9Zb2kGJMSOUud1y7H39Y2eD0/v2xfj5v9h3Ueka/Sak9j9f75+QXMJSYNM8/WUQyk3FiyoH5MRXlDWZwlD7JUhHGNadTz88s5P2CTWzrIN8KhyUCg0lJMaBxQXtb7n58YEUqLBY2NfH6QRnSVBuChMcI8NTgM5KjFReoJVCcIjZHiqenjp+3rz6eOCqaJJDyjWiUl7+QripxFU0cw5CyGyvqW6jZOj3CHSjpUBPAUjXo/1m07fgR4MInx49xJxlPnpdBak6ebcQwnqmkpmnFUbXRU0vOUaHqq7fkRJlGiGRqjYwBbTx9+uLxb0/g0hnZQ8TnFSqG
RB52b2+/yyF2lfCU0gpFzW/lrSgHC2UtoAlCUASmatzQxJDOOqFQktDRLOSq8OopCPGW4nsIRT6mup6wjLMaaaNuIc0qtsh+S5qDBtzfChq3kcYwxMz0OiqqoX2kqnppxECbEZWkocBSTOYp/PCXGngIgTxmyp8Q4UBYafKOfTDJNn06ajXV/mEmnWJU6LvVbRQ6hqd9qImSGykGWBh1L/dY1wtloznH9Mff8lGc01iCHakz+s5ZJ57aNSAlXdVTSkhrfGzkNdR0azzTFfoaeiyEPsxT7WRqDHQXR3nR4dLTw+NsbszIFyY01tnh9OiOl/IwCLk8BV6B/CVTpCabtwe6en9d5JrdMCelqDCPbesPQHMrcF2nmZRtbe9u8rreZcSndduMrahuGh+agcWnC2RhCDK3xyolKx3Uf2robMimlkRTcUN+nnErTHKApzjA0VVsKLyxNzI5KwZ4SDk9BeViMdYsx2ml8Q0N4Nq1XpLNk4wSetBULFpdMalq6SIuMwTe+ftY+aDWtYfjGDiQFqKu0MKTHGb5tPnhb79cvmyNLx1YuHQRo6lq2DVBv6/0BRUc2HY6pTEt+oCjs1ZT+acr/DGXZdlzTRNOza4wCUpMeP72/ZjzepoyRcmUKaNVYnqBwXFM4bmhYsGPpgWYA1zinR7tQ0GbTxW30iCh6UxS+aZqsNc3WhqJsS7UTR13XNXrvYBMPm2wyqlK3VhQ/KZreNMWTmuJJQ+GDpVTdUQDlGh0/GgRGLpuu1KNHQ7OzoulZU0ynyWkMxXSWMmxHw5LzbdFGNImtkUyGUar1kpPTu9D00zVlPEaPgQH9DwXOjhiRowKOX4wDK1W5iXL4VcfL3e83h7fd6yEPHlw60oWOV5MPHUmMPsaL9NyH/1LeRi6ixpdHL0aTY2k9lqzGZWT05Kg5Y8cy57gClp4llRQtJSpu9MIxUTHj3NQ8Yh42lNJn/phSEeoZNKUqmlM1DWKGBgtD4aIdF2bSnOp8WxCX2TQd4JI8gbIORQ9LkZdqGsUMjRiGJntLj9iRlzrfVoEaDDtNWZOY2iWeQuhLUVVY0ZvT1HcM9R1DfccSh3SUfTrfFgadrdrl6VTaAWjKURTvKoqANXmQoUHDUF5sqZ7rKP9zvi0iuRg0eXnp2muacBRNyYqKhZqGMkOuYyg9tVT2cTR8uNDh6dP3lkzjNNsoyg0V5YaafNmQyxjKeCxN/o4qAi40T+PH7fr5cNxv1i9wEEuj2bBoa/VMcqeLdFQy542cjZ48uYEmN9DkBmbcWkB+aSn7d5Q1ORoEPT0DT0OGJ97gKScKoW1WGuPY/JGk62VCW1yy33zYbw4ZhAppFXusUa+62sPJtUveGUULigZ+ReO9pjHI0FM1lHBYGssdRZOu+Xd+3B74W1+kiVUjC9xvXna/bKaLVZZpZaVxjkaO7dKAnPoaxfqK0itNI6KhAchQNGEpinQU2TqKbMOi8TfyCuUiNcq2dbbD5jWj5OnaVZoHFZVNFNWwNY2sZjEGEzS1UzTqKDx2jcELWIalkpByhE1nIZoAqJ6pxlV21HM1jb2aolA9LiscVz2PzJTiUkORgaWXZmmwsRTeWRJ3ZoxfxpX9NFyQ83ua4z29YU+pVCALw1iwp2cdGkPf81PjZSOVemhoXGkE4dQiHa9Mo1FxRsiaSYIxmi2X45sbXxi9Hj++Huov5GiWhmNnxidO0Tc9RE/xi6fMzFOSGBY9v2D7+vEUBTyylYYq/TVB9bQIZ0mdFtJV47s67rePee0lLaX7xk1vh/e3t93+eHhYvz9tdw/TZS5plSA0hr+s0aFSkreaxgWNsevYKq1vfTi9Fz69ppl6aIw/x5bf9ruXt+PD4/rx06TdtAjeGEaO7eJIIE2YQ2MMODb5y5Zvb1NpwhMaQ7ixvV83Pz0cTjtnPuVtpvi2cRY//HY4brLe77J1PTSrUOpDkEJTZGwoCDPjgEs5qaNU1DUGYYMxD9vz1hm+WyCNLVTbzzu
uP+YjZJo5hdAWVBw3L2+nJdXv+3woSB43xfAUwiuKuDRFXJoiLjNu3aInZ6l+56h+52i9padH6Cmr8pTN+8bFgzG1zdaLJI+AQkWKFBVFQZomVE1RkKH429JkaWmOdFQHcCOKGJe2jPV/msU91S7Com2EQvUCl9ZtyXYqXSiqWGjKlgyZbGhGshR4OLLUjZtaxhl/RL6NAGzCGXRaF2rcpXRqJI6WjESnY9oIXxs37aKJ0aVLM6h7U/qlCMdqyg8MTdiGvNVSrOSoUOMoHAuNPG5SR0gfGIXoFKErCmQ1xReaAlkzbi8hK+24TovqvI7CCk/mekrBPdntV22TxXH3NkXKKk3GQiNVOu6O62eAZtK33ZgeDE2BtSbpNujQSPUmfDTFuJ6Gi9C4XvD9FWeLaRDUuApqypfTxCOMhJk8OTSGwe+sKO3TUJoGFkpAFJWVNaWGhsYKQ4OipcDWUbnQNfrEL5v99sNvj7vX181kEvTpJEidlQJvRZURTYmdoZHCUH+wNJE4ypRcY8bx63799rZ5mg5yaShj26bUpK1JwJ6u+QuNKxE/MwKdERfq5Y1Lsj+/5PQmIVz1+/92f/e2fds8b183d1/9+Ld//vP/AlqrELI="; \ No newline at end of file +window.searchData = "eJzFnV2TI7expv9L67bPuPFN6k62dbzalWwfSz4bGwpHB9VdM0Oru9km2aNROPzfNwpgFoGsF19FevdKiukqvMmqBJCZD1D4581+98vh5ssf/3nz8/bl8eZLfXvzsnkebr68ed3sD8P+cHN787Z/uvny5nn3+PY0HH5z+vd3H4/PTze3Nw9Pm8NhONx8eXPzr1tqxZ6bedg9Dr992j38PDX0/u3l4bjdvZybmq4Bjd6Ohgwvx8giqPP3w+6lIDH++YLWnzf7nx93v5QU6JILVD77G3MCn5+fetsWcjU1/tXL8eN+97p9mCROrfxm+kvxnRoho5f6cjju3x6Ou32ltS/SS7HhZ9Mi0+/k+cH8tDkMf90/1bTGy9726S84Hh7/Y3v4j+3Lx2G/PQ6PXbrD5+Pw8uhvKgpH111JOXHnjOrpmispPm8+3x8/bl9+3r58uD/ufh5eDjUD8C3XtKfdjGurD4fD5sNQ1z5fdy3l3eNQ9XS66Jqafx1/SJPw2+nKK6nvXv1AV5M+X3Yl3TBe1mSnq66k+o+3zdP2+Ot/bp+OdfHTxe/p4ivZcBj2n7YP1Zd9vuxausf9sHmuytJVV1I9Ds+vw35zfNtXf3F66bX0xyGyqny66Fqau91TtUfRRVfS/OrP39z/99d/+f6bP/2xprx53d5/GvaHLZvBuvR+//V/fvXXb3+4/+1X3399/9e/fFsTfRzeb96ejvdjmHDP44RFyt/96fdfN8vOx+wuze3ht8NmP+y/ejt+rEluDz/5azfh2iu93+3h293Dpjo3bQ9Pp8uuOk5Xvfl82f+XUfI/RNe7lVYKfdbavG7/1/BrQ7f52V+1/Bcy3YePm2NDZD1e1ttjmFKf97CneeGvfHp6/h/D5rHBiZ6enj9OVy7+rU9Pz39qC2qenp6vEdcwfT/UHBrea7jwwjfb1z8vfbN3+pzTbn7a7Y/VTnO66Fqaj4/fhaC/Kvz4+DxdeS31w2F7OG5e6r86uvBK2uMg0DJQXE/x/XB8+Pid99GasL/0mS69lr6Pu/8rBOHftSRm4Y5T2N4/4yfqH4bj79qG5w/jj5+P0Bf99g/D8fchcvmuOTv8MBxP0c6V8kRuUZsvfBiO1/aESbvtbeBx9VILYkdseQixG17xWfiB/He7l+NQH4b8tQ/TtQt7Qqz4u49v9Wwqln043XCJ9neN5SB/8VVqQsCGlvEnGHDZuOPbaIxf/LUogulX/OFUSWySPJ4vvoZmu1OR8DW86oexaNk0sAbp8fL5mLpEt6E4cJLkFYKFah0PeLz8Gk+3/cFeeZ7aD+/3w+Fj+7R5uuHfMW
fuh8Pr7uXQYMJ03ZWUD8NLlZucrrmW4q+H41CvLtJVV1I9Ng5bcMS6THm3e/rd5qk6J4zXPYTrrqT81lA3f7u0ap4ofhr22/e//m738jJ4LFpTD9c/xNdfUBWKMeqfv/kk5urjv14Hn04ttaFTb04vNj1rdCPTkl4el54F+1FpSRFj0rNaHyItKTXh0bPwRWi0akeb/DVVs/FvpNkd9hYVcbQbyfUh0KpWZqZmgp3Tc0k1iz3Pkt2lwZJeDnee5XpRZ0mtgjnPogsRZ0k7W7g/q3ajzaJeBmtGcp1Is6RWxJlnyUUos6iLMWak2Icwi1o4Q4m0+tBlSauOEc+y7QixRTGDD+dylYS+pFXGhmepLDLsUMoAnlikEw2W9P7X1//n/i9ff/X9n/74zR//cP+7P/3xh6//+ENJ/efh1/v9sDnsXsZJuVqUqo+lRQ/txpHXGdFKGJIpNCLIs0QvfizrZdFjFAHXsGNZod0je6BUWbOEGs+yGcx4mW62ZJfoXhpTtKNGFjqVMWNZpb2/XfImG9Bi1Bn6sGJRq4AUI8EFOLGomkeJkWg/RixpYoSYdvjrKBXR4VlwETYs6jYgw0i+AxeWVEuo8Ky2BBNWVBsQYaJ/AR6sWFJ/1wuwYJNm/akvwIEV5QoKTNQXYsCSBWUEyAbl3kirF/1huQpCqGrmkR/Tu7ju0Yz6uPDy8aKM+JhOFe9VlfJoj0nVsV6zVpuzNOK8umoB5XHJBozXoFdJkFvwXZtK44NswHZVvbYHeMV5owHVncUvwHRlG3KILpbuxXMlRYzm4lSyB8sVlTJILtLqxHEltTyKY0WqLgxXVMwiuLRU1YffSooYvZ3V+rBbSamO3M6qy3Ebr3rEqO143Dx8fIYhxvSn60C3tLk28na2LlO0OQUiP/z6Cvr2XHG89hiuXar4uDluqlKni5ZqHFt+Tv/v4MWhXGyJn9tFWtvD73cPby1+9sX28Hi+9BLFb57hZDOT2z4X44QGrcPubY8qh0xquqxHKUl897vn324Og9VVrfHSn+jSS/TovcGqPRCll1esRDUp/+Gb/2xS/LB9f6GS95TWH+jd5fJf9z///PUfmvT+/joUYvUmrT//vu1Jvj5e+iT//Me2H/Vayj+alL7/7zalw6dLlX745j/bHt9x+/7S5/e/v/7tn5u0fhl+eu2cYaJp//fD8Pr9MMxTDvrDVab8pLGmCX+yq3O1TarUu+Cmpppdc5PKdi+7qen+Ha28STX/3rP4pqbXsv4mlb9kCU6TNc1GXFk7V5hiyr1lqaouLEox0a5FOU2KOC0Hsn0peU07tzonFe6FaTXVzBqdVLRzmU5Ns7xSJ5VetlinZkGObqfavUt2qqp41Q4T7Vu4U9Msrd1JhZcs36mqwxU8TLdrEU9VEZYpmWLXUp6aYnU1TyrevKCnVRev6cGi5QJ+TbG4sicVXPQ9gLo+Xl3BpfuW/NRU21b9pDbUF/5cPmbXvLx3OdBVx8zCoiCg07YuKBXqXBpUV82tDmJxeyfOrut2eXXHYpO6cmHlUCq+YPFQk3qOB87UrxD1NK8iAoFeF0iva3f13wvfeH2FEetWXYuMqor5dUZMtn+pUVU7u9qISXcvOKopwzVH82HkanqllUep7JLFR1X1+vojZkR5CdIlthRWJaU2LFiY1KBdX5s0s2L58qQGe5p8on+RUrNy03u4zgjbtVppZsOyBUs1O4prlsBgf4U4sXMlU96I+cKKiy3Jrm8CVlyjltS6ygnJX3VUKq59AurXiHbaV0QBA7qXLHRZ0OyK+aVSl9uSXz2FDMELqK5gRb1YkVlWdR3t9leBF1tdbEXzK7ju06+vwkpNWL4Qq25JZi0WN6BzOVZNF67I4ql8x6Ksqh5el8UU+5Zm1TSzq7NAAfKao112jda8DNm1TKumC1dqpZpdi7VqetX1Wqn24iVbqDIV4ds/7HYfnuYdKPzzVdBt1FQTuD1Z1IltY5VeaFtWzCLbWLIb2JY1Ia
6N9bpgbVmrBdXG0peA2gZLGg24qm4utk5Ue4PqiiaMphPBrjC6QQ3P1TPJvim6rJvDsrFob8BeVswg2ViwE8iW9co4NpZdBmPL6jmsEOv2gtiKIsawiWAfhC3rlRBsLLoEwFaUIX5NNLvga0UNZjOJWlcaU1arYtdYuBm6tmli5IoEy8C1rFbErbHYItha08ZQKpXtA60to17Zf3qR5tVGngLOnGm0wcxYpBNl1hRzIDOJYivfOahpdPhHB8CqqRaAZSzc8En1BqVcwY4pXTz3N4PJWZhT/L5BTaejv130BusAMukKXfixopaHj4lkP3qs6GbBYyLbjR3LqhA68i5/Ja0ScIwll+DGinIdNiYGtH/toKxbAIuxXgYrdunUISJTXI4Qq7Y0vON+fNio2vCsl4yCXaCQ6S3DhGUbipBwNhjXdp+1a+HSf06wr/TfYEUWBs4suLxq0QoC59IXjBhF6DdTqn3xoEErC/hmYtVvHnSoNbrRMpTXYkce5M2NqH8HoUmxluY2fAmhVaf18fbjuQYLGh/rNeecOpiL5ZdjuZoVGSiXinciubImBHJpKtqB4ypaGMYlan0orqyXBXGzUlQPhqto5iAcL0h1IbiyJgRwsV4XfitrVeFbrLsYvc0rKDF42+/+MZfd7/5xHehGDbUht9GWXuA2KXTjtoJaHrZNcv2oraCHQduk1YfZCjpNkG2SvQix1axoEr+iZjZWPSt2B6klPRydnsX6sFpNKTPPpnKd02tBMwvUJsHuklpBLQfTJrFelFbQqoC0SXIhRisoZ0vZk2Y3QiupZQDaWawTnxW0ivBsElyEzkqqGJyd9fqwWUkJZxNnpT5kVlCqA7NJtB2XNehlYNlMrJJsF5TKoGwSWobJiroZCBJJdiKyglrbPsSzdt/Hx+tjaMlLu8HcNcayEpRL229EcpNAL5ArqmVx3Dm+7dyJUtZr9soerFNULGG5SXTBHsKqara8FqteGE2047k0YOra0VLWbO5/F7zRBkx37h59kK6kVEB0Z7kFgK6kmcdzZ8l+OFdQxGgu6f5X0SliuUluEZQrqTYgubN4B5AraJZw3KS1YI9fRbMBzcXqF4C5sh3Vd7wAyrUoVp/35SNfH56LtRfCuYJ+Gc2lg2/v3r1W3QwAgOKdAKBmQR7RpeqX1j6a8RyTvaQK0oHqUtUqqKvp5DFdKtRfDm9VbnKphcCuakMB1zEDGmBdXa2cXC/YW9em2faIF0C7mnrTo73eXNSA6ybpC2Bd0YIcqouEe0FdQQ9juig17YF0JZ0MojsrdQK6glYez6UFr2uNRnk0l5S9+sBcQQ9juUmrD8oVdOpIbtJcDuRY5STCcd9++91M8dtvv7sKjKN2mljcaEgnipvaRySuse0seJsah9ytsXWI2aaWZ5StsdUWqDaJ1Jhaj2aL1HKFXOR4bh/FjK2tw+jw3HQxq6y1i2extPFyVFJQyKGvqfladFloOwO6pqYB52psuYy1JoE81WrUyRV+JwXEsFrbxsjq3PScWDW2XAJUU/M5PtWqAXHUufUZjWptF8bH53aLS9gK7VZR0yTRTJoa1DBomkktHh+KmGmSaTrWtqiCy/eRwJwpdY0RhTeOOM6Ve2+B2qStt0GbqXnAbJrbziGac9RTWdRfbL31jS59MgX8Mkk0bImqaeQKJLFG3/zVjFbS6be44L+o0Or/7W+ijk3ODjqjJq3t5iHJuXHMSFoVskjkLACJSGP7EIAknWtJqyXcMTWeox2tGnW4cZZqZxsFxQLamJQathmVFeogI9Yqc4x21dq7wtSit/3ak+seQ7qQRKyUJxKNakUAkQ5atfUdjSq4cAmlykcv1vSycCHV6swTW1ECE1naV4vgINXonBnbsUEqU93a06rT4ghtR5lWFfNIgMnViUBdq5jwNGzcaVJoenj140trWi0PbfHzqpf2J6FyZb9ZL1PIj2RAHb+xdVi2j1KTl9aaYEuR/tzuvEbf2HK2JJ8m/Qt7er
YAn6T+s/p7Y+uw3D61PKu2N7ZaLa5PCsXaeiGbjUrpBa8//+kqhXXWXFN9PbKuMxnnarWKWl2pWurhks0VH6gNQ76qJkV9F2g943CPK1VivbrOaSytCp2vW6q0eXz83dvhuJuPX1xr8/j4QFcuVXt4Gjb7Rj1/7cWK7zdjJ9qCEJPrxVcuVsPV8ZlUuTre5PMt7n6ZwldgagAipfmhSee3v37fOFB9GI4//bpotMpqNg8dk/ClY8iY2rf1gDHBv9T/J7V6B5jk/h+OXP8hL9J63n0aGh9muHjJ84xDgj89PW2e52ddh3++SigQNdUUBpws6iTtsUrvtteyYpa/x5LdW1/Lmn9HVD7W+3vP9teyVgurj6Uv2QLbYEmjAVfVzRVtEtXe5aAVTThMJ4JdC0Eb1HDgP5PsW8RW1s2tDIhFezeylBUz6wViwc6tsWW98iqCWHbZ9tiyei4hinV7t8hWFPGKg0Swb5tsWa+0DiEWXbJVtqIMVyckml3bZStqsISXqHUt6C2rVdPbWLg5tW3TxOsZkGA5MC2rFVc1xGKLts/WtDEZT2XLyx1aRrmyv/RuXr3aSFPAzDONttUQsUjnJtaaYm6NRBK1VqBkTaPDHzq2OtZUC+snYuGGJRQNSjlaxJQunuub11bMwpoiGm3SaZK5RKWjV1/kJ/XVHUmH69oWW1HLr/lIJPu3xlZ0sytBEtnu7bFlVbg+hA8sV9I6rxopKp7XjXR4aevqFCzUM650rlFJBJcfv1m2o7ByJdZfsC23qltfz8IsWL41t2pLwzvv357bqNrw7Pu36Fa1yytimP6ybbplG4rrZGaTQm2pTLsW5vw5wTLsb1DNrpmZKV5eg2ldSTOXvmDMLK6nmSnVltQ0aGVX1czEqri9Q63RbZZtwW2xI7/iZm5EfdFNk2ItaW9YetOq0/p4+7ffNljQ+FivOefUV+rE8su34dasyKzfScU7t+KWNeGqnjTR7tiOW9HCa30Stb4tuWW97AqgWWGtZ1tuRTO3LoiX17q25pY14WqhWK9re25Zq7qGKNZtXkY0rwfF2PB1ePnqm7mQ/+frYMNzU23YMFjUiw0jlW5sWFTMY8NIsh8bFjUxNoz0+rBhUasJG0bSF2HDuiWNBlxVNxu3xqrdIWtZE0ersWBf6ltXy8y9XLJz0i3qZrFhJNpdSiwq5rBhJNiLDYt6FWwYyS7EhkX1bDE/0u3GhmXFDDaMBTuxYVGviA0j0UXYsKyMsWGs2YcNy2o4A4nV+rBhUa2ODSPhdmzYpJnBhkCwkqoX1crYMBJr2g9d08pgoUSm70u7LaNc2V+6seG1RpoSNuQajdgwEunFhhXFLDaMo9YaNqxodPhHDw6qqJawYSS84Lu3DcrZIlmqfPHc344ReZhTxogVnY7+d9EbbQB8cdfoA3xltQLgiyUXAL6ybh7wxbL9gK+oigEfGwKupFUEb5Hkku/gVpQbQFxsQPt+8bJuCbxFekvAW023AbylFlwA3mq2NLzzBeCtTbXh2S8AbzXtCnhL9ReCt6INZfDGB+sqeGvWypT4M4IV8FZXzYM3rnh5FaMZvM2kLxhByuCNK1XBW10rD964WB28tas1uk3bvvYW3QJom4k2gLYWxVqa2wLaGnVaH2d9p3uDYuNjvOac0gDWIvkLwFrFihxYS8R7wVpRE4O1JBXtAWtlrQxYi9U6wVpRLw/WeOmpC6yVNbNgjRWg+sBaURODtUivD6wVtepgLdJd/O3beQUlAm2fAWX7fCXE9nlKUxr42ud+uDa130vWClpZrDaJdTO1ghoEapNSF00rqLSgtEn0Eo5Ws6FF+nqKuejzrNcbdZbUYLR5luqiZjUdPKumYn1zaUExB8smud5qWUErg8k+J7FK++hbUCoDsklwGR0r6OYK1p/PIUIfFytpYSh2luojYgWlEg6b5JawsJImBGGfk1DkKjowN/gchx9X8f0q/Jokm8lXgxrGXjOpcpZc0CkCr0lm0Sa5oi
pGG5FgH/cqaLUdMDkp18+XvGjsLHhqL3m7wihWYG5p623AbWq+k7YVtXKo7Ry9dlZ/i2qtntnBZ4p6Bdw2SS5gbTXNXFEs1rwsbmhGbGlY1FVJLiq29rflb7KO2c4doouxlXTygO0s1k/XSopZtHYW7OZqBT0I1ZLOfg2VEk6bxJawtJJmHaSdpZdvZytYUEBqk/ICnlZWrMO0WHs5SStbUXvX/QytRa/2pC8e87rQWay8jJsV1IvQLB10L4uqOvEZlO7bIFPTz4K0VPvCekYrQmOi1xo7ijAt1bwwcmjHaqlsd3W7VbfFsZZta6takEdtTB5ztsu0i4n1glMmmxSbHnb/JreadstDvtrzrSO4SXg5fyvqZ+BbJNtJ3gpqELtFqWgHcyupYOB21umjbQWlLGpLi1tXGomykC0pcXURtoIaxGuTUhdbK6hUwdqkuJiqMXVjJ/GvXo4f97vX7QOfzLYvx2H/fvMwHH7DrynytqT0wkojtTZ7SyUz43NmHI+bh4/Pw8ux/fd9kd50XYM4JKwa0wsNWw2ZEcSqJd1EsdWUBC9WzejCja0mlNhj1aJLWGSXgUvs+jeZw0P9ujG9UX+zKUn0X7ejKxFoNYIRwaoVnYSw1QyMC6vWLMOHrUbxKnzVnF622GxIChrrdvSBx1YzEIWs2rKESjYblCDKuildyLLLiDiabLOjtHa0WTrJ8Oq6XclewYg4GvvDbvfhaVZXiGxJLrhGHDZvsDcIS21eFoEhKxaEX02mFGIvYEZv4NVkwqnu1vooosuxizeJlkI9INod5zUZ8WF4Gcev7e7ld7uX91vYz4Ex59se6LYLnkQu0gTCXWFmk3hjjAlsuSTAbDet26J/hyGFuBKZ0RtUthmRiyiRBV3hZJN8PpYE+p2BZJMB1SgS2LEshGwypxA/AkN6g8c2E7KRI7KgL2xsM8BX7e63pwXK2+ZhDN54wQhaCV6BBUsi1zZTcmErMqIrZm2Tz0WNSL4rZMzJz+PFkRTkTRj/2h4pPg6Hh/32texbU5NfpJeXPMobmRH1/2lQO123VOZ1s988D8d4HUxBLLm6QzJ+P9+8vL4d//R2fH07/pCdWGcXtb+t7XhrYc7GTX/BbsM/bm57bs21v6bfCn5fvxnxo/722+++IUlkRPz3izaMlNps2jySWJp5plO+2Ca7iS6/QPTP33wSjYKv20/55ZJNYlN+16gYX79c9vfD8Pr9MMAZYy76OAyvh3D1cskwSrQJfqBrL5Db7/7RKBauXC41Hq7apPT0VDwhtyoE2HBRr+2Y4qosOw+uKLmjay+QS78jWZZ7HV4224vk+JrQoh5aj90reArKGxWjq5dLfm59np/7HyZbRPBhe8gkSnO16Orlkm8vfaLJ9X2y0TzLF9xGkqc/tYcvD2yBXKatL2oflSCbMjL7HR5+E43TRa0C8SMJw0ShaptccI2q7bzBZUcBXVa1RVYsqNo2mVKo2gIzlh16eUEBFdiw8BTMhbVLYEBX7bJJvLF2CWy5/JzMi2qXOYv+HYYUapfIjGWnOCytXSILFhwPU5Hnu3DLBrSd8bC4XAoUFx16eWm5FNhxySmYy8ulwJBlx2IuLpciC5ack3lZlRJYsfzgzKVVSmTErErZL5erSiK5BUdplquSIY8pxUPxBVeJh2YNLvvG6YXxELBiSTzUYkopHpqbsexr/pfEQ3MbFn7ev2KEL2U2WkDXZjpWi1w2/Jqr9YVfLeJjyFIttgJL0H0XPIPWKBAbctmxB5dFgRmL/h2GlKJAYMayTwoujgKBBQtOSlgeks31Fx0ocHFINrfjkhMGKuZMH1JoNCW+/oLuWooE56LLTjpYHgkCC5YcfXBhJDi3YvlZCIsjQWDEgsMRFkeGQH7BaQktkWGOV5//ejVezZps4tWRkZ28mqsVeXVdpsyruVgDr65LHo777UMhnjrLTVculTr++tr0EE/XdcikHlfIQq6Yf5Qzj9Tky3KMhuyiQ66YR1QyiA
6Zcq5QyxI6hPIBeik07xBojn474t5e+Q7Vy8SKUWwtfu0RykeqxRi1Q6IUjZbj0A6RhoizKdbskCyGd5XArkemEMKVg7cOkWqY1hCg9cjlQ7H2ctzicKsYaJUl4gnuz3mKf/pTcYJjTR23m6fv/Tv8C98VnzY8v7AH+z6/Pg3Hzqa/iG7DDwrb388oCjbUWEWXCYWuWzCh1pH7TMh265IF5U7eZUC2Dxb0yz2yJh97fMnJl/h1djkD9+TyeobaQyvNy6lSdWK+oJv09YwLOkOf/zd5XKYKA/ysVH+pSu12T/fjxyganl9y7TK5t9zCnFSpvC6u0mNCt9q+fPiBf4kj0ptd1LGiGaYIuMEvtvk0YW5nL0DIaRYhQrNsroSQUS1WEkqi83dXGvOWz+jZkQ/P5H2fPGubSkqjIrKiu+h+cWDRGlFcbkRhNG2NKS43ojTSZuf1ng8FNZpRHIWhIXgsvtyU7AiNrOj7wFVTvJOrvF615tpXbS2VCMM+lcPDxwGvwj4rsSv7pXLjcWMxt1CBLE2U15ofm6fFxbNhzyS4dO7rmPIqM112q8Bf+xYoB7d62B3gMzk39kVyITb5r6UlyrWNXDOpTmhdFH9KPvKNVedfoG9u/rT0oOEpplcul2p6jvkVEYsf5HF33Dy1/NDkwgW/M9zf9DPZpW1icV/63/vN6+vwmJs6oj+396v3by/Z7cu8wS+ii7H1sYWdsGkmVqRNWCjzsHJjPrvk6g/tPIL2PDg+mN6tnTDnjZB+ayBnaeOzOvwm/lO51MjbnOjVD/HbObWa/LGr3XEDHDY1+ktXi98ks2Joy/9bVyvffvvd93wHVGgr+ktXi6dtKfw706HR9I9L2v1LvG8maXT8S1+LCeA5tTX+W38rP2w+oIZ+2Hzob8uPe8D70j8uiRfuX4d9GHXrTX+RuS2zFyk1vMAJYVCRtwLccqkFeD4um9C2UrHdhi7x66juHhsc6ovTZZcqwW4FpPLRW5tWFJx5Lz2fC9Tq5vUWrmnfUquuYQsvv+TVa9XsRr2319fd/ni437w9bnf3WzBfQXF82xUtCQ+335TpvqvYQoGQr+nEBakGe8C9V7Hpdb97fj3eP2wePnZaNLvzKvbMV6c2mFJfotppxaftIY5wG0yY7riK/i/DT/eHYbN/+NhjQ3LXJXYcNx9aZqrTZZ1KPAIKywNmOCDIpn/siqz+Cy6CCc0mf+tYeff0tPvl++3z9mlTb/ELf/Vhuho/ptTKku5fX35+2f3CnTKn+zZdfYHucffKTq3Jih53r+DYmhZF/t5OKcgf4+pckI3+Unxn6tzY6eSiqaFPm/1289PTcPjN6S+oob/d3mxfHofPN1/+c/wqvu/YX97Id+rd+ub25v12eHo83Hz5IxUQH3bPp+/BPO4e3vz//u102X8P4yd4xovD1b+5u7n98e5W2XfSmr/97fZHutn/wf8DtXH+F3+juLn9Udxa+W61FsmNYnajSG6UN7c/ylu1fmeUS26UsxtlcqO6uf1RIUU1u1ElN+qb2x81ulHPbtTJjebm9kdza9w7t1LJjWZ2o0lutDe3P9pbLd7Z1Sq50c5utMmN7ub2R3er797JO53c6GY3uuTG1c3tjyt042p24yq5cZ17HevZjevUAUZ/WCNJMfcdwZzHe88dkhXAf1IHEqNbCHGrVu/WQqY3z31IpE4kRtcQEt489yOROpIY3UOoWy3fGclunvuSSJ1JjC4iNHxgc38SqUOJ0U2EgTfPfUqkTiVGVxEWPu25X4nUscToLsLdKvVOa3bz3LdE6lxidBmxgspz/xKpg8nRZwT0MDn3MJl6mBx9RkIPk3MPk2yI8mOUgMpglEo9TI4+IyUab+Tcw2TqYXL0GalutXqnVephcu5hMvUwOfqM1Ldav9MqHbDk3MNk6mFy9Blp4M1zD5Oph8nRZ6S9Ve6ds+mIJ+ceJlMPk6vcHCTnDiZTB5PrrHfKuYPJ1MHU6DISDrdq7mAqdTDlHW
yFnpeaO5hKHUzJ7PNScwdTbB70DraGymAqTB1MjS6j7uDNcwdTqYOp0WWUgDfPHUylDqZs7jWruX+p1L/U6DFKQuG5f6nUv9ToMkrBm+cOplIHU6PLKNin1NzBVOpgenQZBfuUnjuYTh1Mjy6jLLx57mA6dTA9uoxy8Oa5g+nUwbSPtKBr67mDaRZseQeD3qlBvJU6mB5dRkPv1HMH06mD6dFnNPROPfcwnXqYHn1GQw/Tcw/TqYfp0Wc09DA99zCdepgefUZDD9NzD9Oph5nRZzT0MDP3MJN6mBl9RkMPM3MPM6mHmdFnNPQwM/cwk3qYGX1GQw8zcw8zqYcZH89DDzNzDzMspPcxPfQwA6L61MPM6DMGepiZe5hJPcyMPmOgh5m5h5nUw8zoMwZ6mJl7mEk9zIw+Y6CHmbmHmdTD7OgzxqCIxM49zKYeZkefMdDD7NzDbOphdvQZAz3Mzj3Mph5mR58xK5iDzT3Mph5mR58xa/ib5x5mUw+zo89Y6GF27mGWJY4+c4QeZkHumHqYHX3GSpSw2rmH2dTD7Cqbs84dzKYOZte5tNXO/cum/uXucpmrm7uXS93LiVzy6ube5VLvcjKXvrq5c7nUuZzKpq9u7lwudS6ns+mrmzuXS53LmWz66ubO5VLncjabvrq5czlWmXDZ9NWB4kTqXG6VTV/d3Ltc6l1unU1f3dy9XOpeq7tsgrCa+9cq9a+VyKavq7mDrVIHW8ls+rqae9gq9bCVyqavq7mHrVIPW+ls+rqae9gq9bCVyWagq7mHrVIPW9lsBrqae9gq9bCVy2agq7mHrVj9a5XNqFagBJZ62Gr0GatujXi3vktzk9Xcw1aph62zJdT13MHWqYOtRdY713MHW6cOtpbZ9HU9d7B16mBrlU1f13MHW6cOttbZh72eO9g6dbC1yaav67mDrVMHW9ts+rqeO9g6dbC1y6av67mDrVMHW2erFOu5f61ZjXWdTV/XoMzK66x32fw1/C29Pfq30/0im8KGv/H7Wbn1Tmaz2PA3fj+ruN6pbCIb/sbvZ0XXO53NZcPf+P2s7npnsuls+Bu/n5Ve72w2ow1/4/ez6uudyya14W/8flaAvVtl89rwN34/q8HerbOpbfgbv5/5n6/d4+xWoFL/rNYvsgmugNV+5n++go9zXIEK/rzi74v4OM0VqObPi/6+jo8zXYHK/rzu70v5ONkVqPLPS/++mo/zXYGK/7z67wv6OOUVqP7PAYCv6eOsVyAEwBmAL+vjxFcgCsAwgPCVfZz7CgACBCMBwhf3cforAAsQDAYIX9/HGbAAOEAwHiB8iR8nwQIQAcGQgPBVfpwHCwAFBKMCwhf6cSosABcQDAwIX+vH2bAAaEAwNiB8uR8nxALQAcHwgPAlf5wTC0AIBEMEwlf9rUZpsQCQQDBKIHzhH2bGAmACwTiB8KV/ayAqB6RAMFQgfPXfWnw/cD9GC4QHANbdGv1OC24/Qp7M/TwDsCsUHAuADARjBsJjALvG9gP3Y9hAeBTg7vD9wP0YOhCeBji8VAHAA8HogfBAwEGeJwA/EAwgCM8EHFy4IABCEIwhCI8FHFy/IABFEAwjCE8GHPY/ABIEIwnCwwGH/Q+wBMFggvB8wDl8P/A/xhOERwRuhe9H1J35n6cEDvsfgAqCUQXhQcEK+x/gCoKBBeFZwQr7H0ALgrEF4XHBClb1BKALguEF4YkBHr4AXxAMMAjPDGCSKQBhEAwxCE8NYHlPAMYgGGQQJrtaSADKIBhmEJ4c4DUqgDMIBhqEZweZZSoANQjGGoTHB5mVKoA2CIYbhCcImcUqADgIRhyEhwiZ9SqAOQgGHYTnCLklK8D1GHcQHiVkVq0A8iAYehCeJmQWrgD4IBh9EDZfXhGAPwgGIIRnCpnlKwBBCMYghMcKuAQoAIUQDEMITxZwFVAAECEYiRAeLuBCoAAsQjAYITxfwLVAAXCEYDxCeMSAy4ECEAnBkITwmAFXBAWgEoJhCe
FRA65TCUAmBEMTwuMGXBcUgE4IhieERw6wZiQAoBCMUAhPHTLuCyCFYJRCePCAC4QCcArBQIXw7AHXCAVAFYKxCuHxQ+bxA1ohGK4QnkDgSqEAwEIwYiE8hMDFQgGYhWDQQngOgeuFAmALwbiF8Cgi8/qB9zFyITyMwFVDAdiFYPBCeB6RqRsCfCEYvxAeSWTqhoBgCIYwhKcSmbohgBiCUQzhwUSmbgg4hmAgQ3g2kakbApQhGMsQHk9k6oaAZgiGM4QnFJm6IQAaghEN4SFFpm4ImIZgUEN4TpGpGwKsIRjXEJ5VZOqGAG0IxjaExxWZuiGgG4LhDeGJRaZuCACHYIRDeGiRqRsCxiEY5BCeW2TqhgBzCMY5hEcXmbohIB2CoQ7h6UWmbghgh2C0Q3iAkakbAt4hGPAQHmJk6oaAeQgGPYTnGJm6IcAegnEP6TkGrhtKwD0k4x7ScwxcN5SAe0jGPaTnGLhuKAH3kIx7SM8xcN1QAu4hGfeQnmPguqEE3EMy7iE9x8B1Qwm4h2TcQ3qOgeuGEnAPybiH9BwD1w0l4B6ScQ/pOQauG0rAPSTjHtJzDFw3lIB7SMY9pOcYK4USbwm4h2TcQ3qOARNvCbCHZNhDeowBgzcJqIdk1EN6igETbwmgh2TQQ3qIgbfpAOYhGfOQnmHgpfsAeUiGPGTY7pBZvQ9cjyEPGXY8wMRbAuQhGfKQYdMDTLwlQB6SIQ8Z9j3AxFsC5CH5zoew9QEm3hLtfeCbHzzCwIm3RNsfZvsfZDbxlnAHBHM+jzAyy/LRJgi+C8IjDJx4S7QPgm+E8Agjs3UEbYXgeyHCZgjsf2g3BN8OEfZDZDaQAP/jOyI8wshsA0GbIviuCI8wMjtB0L4Ihjxk2BmBN4MA5iEZ85BhcwTeDwKYh2TMQ3qGgbeEAOQhGfKQquB+AHlIhjykRxg4cZYAeUiGPKRHGDhxlgB5SIY8pEcYuccH3I8hD+kRBk6cJUAekiEPGXZM4JkXIA/JkIcMmybwzAuQh2TIQ+rskioJiIdkxEOGnRM4cATEQzLiIcPmCRw4AuIhGfGQYf8E7n2AeEhGPGTYQoF7HyAekhEP6QkGTpwlIB6SEQ/pCQZOnCUgHpIRD+kJBk6cJSAekhEP6QkGTpwlIB6SEQ8ZdlRg9wXIQzLkIcOmCuy+gHlIxjxk2FeB/Q9AD8mghwxbK7D/AeohGfWQYXdFZh8g8D+GPWTYYIH9D2APybCH9BgDJ84SYA/JsIf0GAMnzhJgD8mwh/QYAyfOEmAPybCH9BgDJ84SYA/JsIcM+y2w/wHsIRn2kGHLBfY/gD0kwx4y7LrA/gewh2TYQ4aNF9j/APaQDHvIsPcC+x/AHpJhDxm2X+DEGWAPybCH9BgjkzgD7CEZ9pAeY2QSZ4A9JMMe0mOMTOIMsIdk2EN6jJFJnAH2kAx7SI8xMokzwB6SYQ8ZsAf2P4A9JMMe0nOMFVxwIwH3kIx7SM8xcOIMsIdk2EN6jIGDN0A9JKMe0lMMnDgD6CEZ9JABesDMATAPyZiH9AwDJy4AeUiGPGTYpYETF4A8JEMeMmzUwIkzQB6SIQ8Z9mrgxBkwD8mYhwzbNXDiDJiHZMxDhh0bOHEGzEMy5iHDpg2cOAPmIRnzkJ5hZBJnwDwkYx7SM4xM5gKYh2TMQ3qGkUmcAfOQjHlIzzAyiTNgHpIxDxn2cGD/A8xDMuYhwzYOnDgD5iEZ85CeYWQSZ8A8JGMe0jOMTOIMmIdkzEN6hpFJnAHzkIx5yHX+wwASMA/JmIf0DAMTawmYh2TMQ3qGgTMvgDwkQx7SI4zc5xiA+zHkIT3CyCTeAHlIhjykRxiZxBsgD8mQh1zndxFJgDwkQx7SI4xM4g2Qh2TIQ4WtHvjrDAB5KIY8VNjqgT/QAJCHYshD3WXrLgoQD8WIhwo7PfBnGgDxUIx4qLDTA3+pARAPxYiHCjs98McaAPFQjHiosNMD9l4FiIdixEN5goETbwWIh2
LEQ3mCgRNvBYiHYsRDeYKBE28FiIdixEN5goETbwWIh2LEQ4WdHth9AfJQDHmosNMDuy9gHooxDxV2emD/A9BDMeihwk4P7H+AeihGPVTY6YH9D2APxbCHCjs9sP8B7KEY9lAeY+DEWwHsoRj2UB5j4MRbAeyhGPZQHmPgxFsB7KEY9lAeY+DEWwHsoRj2UGGnB/Y/gD0Uwx4q7PTA/gewh2LYQ4WdHtj/APZQDHuosNMD+x/AHophDxV2emD/A9hDMeyhwk4PmHgrgD0Uwx7KYwyceCuAPRTDHspjDJx4K4A9FMMeymMMnHgrgD0U/x6Uxxg48Vboi1D8k1Bhqwf2P/RRKP5VqLDVA/sf+i7U7MNQnvnCLxYq+Gko5n9hqwd8fOjjUPzrUB5j4M9poc9D8e9DeYoBE2+FvhDFPxEVoAfKPBT6RhT/SJRnGDDxUegrUfwzUR5h4MRboQ9FMeShPMPAibcCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahPMPAibcCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahPMPAmYsCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahPMPAibcCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahwkYPPHUB5qEY81CeYeDEWwHmoRjzUCb/SQMFmIdizEN5hoEzJ4A8FEMeyhTcDyAPxZCH8ggj8ylDgDwUQx7KI4zM1wwB8lAMeSiTX2mvAPJQDHkojzAy3zQEyEMx5KE8wsgkzgB5KIY8lEcYmcQZIA/FkIfyCCPzcUPgfYx4KE8wMokzIB6KEQ/lCUYmcQbEQzHioTzByCTOgHgoRjyUJxiZxBkQD8WIh/IEI5M4A+KhGPFQnmBkEmdAPBQjHsoTjEziDIiHYsRDeYKRSZwB8VCMeCiPMDKJM0AeiiEP5RlGJnEGzEMx5qE8xMgkzgB6KAY9lKcYmcQZUA/FqIfyGCOTOAPsoRj2UB5jZBJngD0Uwx7KY4xM4gywh2LYQ3mMkUmcAfZQDHsojzEyiTPAHophD+UxRiZxBthDMeyhPMbIJM4AeyiGPZTHGJnEGWAPxbCH8hgjkzgD7KEY9lAeY2QSZ4A9FMMeymOMTOIMsIdi2EN5jJFJnAH2UAx7KI8xMokzwB6KYQ/lMUYmcQbYQzHsoTzGyCTOAHsohj2UxxiZxBlgD8Wwh/IYI5M4A+yhGPZQAXtg/wPYQzHsodb5D7srwD0U4x7KcwycOAPsoRj2UOtC9Aewh2LYQwXsgYNvgD0Uwx7KY4wV3OKvAPZQDHsojzFWcIu/AthDMeyhPcZYrVDdQgPsoRn20B5jrOAWfw2wh2bYQ3uOsYZb/DXgHppxD+05xhpu8deAe2jGPbTnGGv4iQkNuIdm3EN7jrGGn5jQgHtoxj205xhrfEQG4B6acQ/tOcYafmJCA+6hGffQnmOsof9pwD004x7ac4w19D8NuIdm3EOHoyzgJyY04B6acQ/tOUbGfwH30Ix7aM8x1th/AffQjHvocKjF3R02ADggAx9aZAt/GnAPzbiHFtnCnwbYQzPsoUW28KcB9dCMemiRLfxpAD00gx5aZAt/GjAPzZiHFvnCnwbMQzPmoWW+8KcB89CMeWiZL/xpwDw0Yx5a5gt/GjAPzZiHlvnCnwbMQzPmoWW+8KcB89CMeWiZL/xpwDw0Yx5a5udeDZiHZsxDy3zhTwPmoRnz0DJf+NOAeWjGPLTMF/40YB6aMQ+t8oU/DZiHZsxDq3zhTwPmoRnz0Cpf+NOAeWjGPLTKF/40YB6aMQ+t8oU/DaCHZtBDq2zhTwPooRn00KrgfoB6aEY9tMoX/jTAHpphD63yhT8NuIdm3EOrfOFPA+6h+QkZOl/40+iMDH5Ihs4X/jQ6JoOfk6HzhT+NTsrgR2UE7nGHYz90WsbsuIzsiisNz8tg7hf2euBzK9CRGfzMjLDXAx9dgU7N4MdmhL0euPuigzP4yRlhrw
fuvujsDH54hg7TL46d0fkZDHxoDzIyx6wA8KEZ+NAeZGROWgHgQzPwoT3IyBy2AsCHZuBDe5CROW8FgA/NwIcOmz0yR64AB2TgQ4fNHrgDAfKhGfnQYbMHdmBAPjQjHzps9sAODMiHZuRDh80e2IEB+dCMfGhPMnDpUQPyoRn50J5k4NKjBuRDM/KhPcnApUcNyIdm5EN7koFLjxqQD83Ih/YoA5ceNUAfmqEPHTZ7YP8D6EMz9KHDZo/MqT/A/xj60GGzB/Y/gD40Qx86bPbA/gfQh2boQ4fNHtj/APrQDH1ojzJw6VED9KEZ+tAeZeDSowboQzP0oT3KwKVHDdCHZuhDe5SBS48aoA/N0If2KAOXHjVAH5qhD+1RBi49aoA+NEMf2qMMXHrUAH1ohj50OJTjDn5mQgP2oRn70J5l4NwdoA/N0If2KAPHj4B8aEY+tCcZOHcH4EMz8KE9yMC5O+AemnEP7TkGzp0A9tAMe+iw2wPnTgB7aIY9dNjtgXN3gD00wx467PbAuTvAHpphDx12e+DcHWAPzbCHDrs9cO4OsIdm2EN7jJHJ3QH20Ax7aI8xMrk7wB6aYQ/tMUYmeQLYQzPsoT3GyOTuAHtohj10OL4D+x/AHpphDx12e2D/A9hDM+yhwyEeOHcH2EMz7KHDOR44dwfYQzPsocNRHpmj64D/Me6hw3YPHDsB7qEZ99CBe+DkE3APzbiHzp/poQH20Ax76HXB/QD20Ax7GI8xcO5uAPYwDHsYjzFw7m4A9jAMe5i7/GYjA7CHYdjDeIyBc3cDsIdh2MOE7R74LD6APQzDHiZs98DH8QHsYRj2MB5jwNdvAPUwjHqYsNsDH+gHqIdh1MOE3R74TD9APQyjHibs9sDH+gHqYRj1MGG3Bz47ElAPw6iH8RQDZ84GUA/DqIfxFANnzgZQD8Ooh/EQA2fOBkAPw6CH8RQDZ84GUA/DqIcJuz2w+wLsYRj2MGG3B3ZfwD0M4x4m7PbA/gfAh2Hgw4TdHtj/APkwjHyYsNsD+x8gH4aRDxN2e2D/A+TDMPJhPMnIHF8KyIdh5MN4kpE7wRT4HyMfxpOMzCGmgHwYRj6MJxmZc0wB+TCMfJiw2wP7HyAfhpEPE3Z7YP8D5MMw8mHCbg/sf4B8GEY+TNjtgf0PkA/DyIcJuz2w/wHyYRj5MGG3B8ycDSAfhpEP40kGzpwNIB+GkQ/jSQbOnA0gH4aRD+NJBs6cDSAfhpEP40kGzpwNIB+GkQ8TDvbA/gfQh2How3iUgTNnA9CHYejDqPCtA/idCQPYh2Hsw3iWATNnA9CHYejDhIM9YPQGyIdh5MN4kgEzZwPAh2Hgw3iQATNnA7iHYdzDeI4BMxcDsIdh2MOcsAeWB77HqIcJ2z1g5mwA9jAMe5iw3QNmzgZgD8OwhwnbPWDmbAD2MAx7mLDdA2bOBmAPw7CHCds9YOZsAPYwDHuYgD1g5mwA9TD82HBPMXDqYtDB4fzkcE8xcOZs0Nnh/PBwTzFw5mzQ8eH8/PCw3QP7HzpBnB8hHrZ7wMzZoEPEZ6eIm2zmbOA54sz/PMXAmbNBR4nzs8TDfg8cO6HTxPlx4ib/oQODDhTnJ4p7ioG/M2HQmeKMehib/cKfAdDDMOhhCgd7GAA9DIMexkOMTOYNoIdh0MN4iJHJvAH0MAx6GJtfdGAA9DAMephwsAcOHQH0MAx6mLDfA0/dAHoYBj1M2O+Bp24APQyDHsZmCy8GMA/DmIcJ2z1w5AmYh2HMw4TtHjjyBMzDMOZhwnYP3HsB8zCMeRjPMDKZN2AehjEP4xlGJvMGzMMw5mE8w8hk3oB5GMY8jGcYmcwbMA/DmIfxCCOTeQPkYRjyMGG7B3ZfwDwMYx4mbPfA7gugh2HQw4TtHtj/APUwjHqYsN0D+x/AHoZhDxO2e2D/A9zDMO5hPMfIZN6AexjGPYznGJnMG3APw7iH8Rwjk3kD7mEY9zCeY2Qyb8A9DOMexnOMTOYNuI
dh3MOE7R7Y/wD3MIx7mLDdA/sf4B6GcQ8Ttntg/wPcwzDuYcJ2D+x/gHsYxj1M2O6B/Q9wD8O4h/EcI5N5A+5hGPcwnmNkMm/APQzjHsZzjEzmDbiHYdzDeI6RybwB9zCMexjPMTKZN+AehnEP4zlGJvMG3MMw7mE8yMhk3gB8GAY+TAAfd3DBvwHkwzDyYQvkwwLyYRn5sJ5kiDt7q9fvVusVa2DugZahD3uXPVzBAvJhGfmwd9nDFSwAH5aBD3uXPVzBAu5hGfewd9nDFSzAHpZhD3uXP1zBAu5hGfewd/nDFSzgHpZxD3uXP1zBAu5hGfewnmPg3NkC7mEZ97CeY+Dc2QLuYRn3sCKffFjAPSzjHtZzDJw7W8A9LOMeNuz2wO8fcA/LuIcNR3vg9w+4h2Xcw4r86GcB97CMe1jPMXDubQH3sIx72HC4xx3c7mMB+LAMfFgPMjKjDwAfloEPe9rygUcfQD4sIx/WkwysD8CHZeDDhi0fd3C/kgXkwzLyYT3JwMMXAB+WgQ8btnzcwf1KFpAPy8iHlfnxD4APy8CHlfnxD3APy7iHlYXxD3APy7iHlYXxD3APy7iHlYXxD3APy7iHlYXxD3APy7iHVYXxD3APy7iHVYXxD3APy7iHVYXxD3APy7iHDad7CLhd0wLwYRn4sB5kZAZQAD4sAx82HO+BHQiAD8vAh1WFARCAD8vAhw3gQ8A9BxaAD8vAh1XhK88SnQlvAfqwDH3Y8LWrsX4ORkAAPyyDH/b0uSvIbizAH5bhDxu+dyVgCGoBALEMgNjwwSthsQXADRkCsQGBCIcbAG7IGIg9MRA8jAMIYhkEsTq769wCBmIZA7GeacCPxVmAQCxDINYjDQvPJbcAgViGQKxHGjbjAcAHGQKxHmngc9EtQCCWIRDrkQY+3s0CBGIZArEm/5V7CxCIZQjEeqSBP9ZnAQKxDIFYjzQwwrIAgViGQOzpcHO4adgCBmIZA7HhdHO8ctkCCGIZBLEmu/7KAgZiGQOx4XRzgcMQAEEsgyA2HG+OAbIFFMQyCmLD+eYSz0QAg1iGQWw44FzikRyAEMtAiA0gRMLtTxaQEMtIiPVkw64QyLGAhFhGQmw44lzCbw9YgEIsQyHWhjXQ8OMDFrAQy1iIDSwER9OAhVjGQqy1hWgewBDLYIj1cCOjD7yQsRDr4QYOxgELsYyF2HDGOYxFAAqxDIVYjzZwKARIiGUkxDqRj6UBCbGMhNhwxDmOpQEJsYyEWKfysTQgIZaREOvJBub4FpAQy0iI9WQjE4sDEmIZCbFh9weOxQEJsYyE2HDeB47FAQmxjITYcN4HjsUBCbGMhNhw3kfGf4D7MRJiw/4P7D+AhFhGQqwnG5lQGpAQy0iIDTtAJA4jAQqxDIVYjzYyow9AIZahEBu2gGRGH8BCLGMhdpX99oYFKMQyFGJX2W9vWEBCLCMhNuwAkRbOHwCFWIZCbDjwA79/4H+MhNiwA0TiIBygEMtQiF3nxz9AQiwjIXZdGP8ACbGMhNh1YfwDJMQyEmLXhfEPkBDLSIhd57++YQEJsYyE2HX+6xsWkBDLSIgNX76SOAcCKMQyFGLXhQEQoBDLUIhdFwZAgEIsQyF2XRgAAQmxjIS4u/wA6AAJcYyEuLv8AOgACHEMhLi7MADCINoBFOIYCnF3Kl9LcACGOAZD3J3O1xIcwCGO4RDn+UamluAAEHEMiLgARBQM4x0gIo4REecJh3X4Ecyd0DEi4gIRgVGIA0TEMSLiPOHAk5ADRMQxIuICEcGTkANIxDEk4jziwPqAiDhGRJzI1qMdACKOARHnAQecQxzgIY7xEOf5BuzBDuAQx3CIEyY7BTiAQxzDIS6cdQ6nAAdwiGM4xAUcAqcAB2iIYzTEifxSVAdoiGM0xIn8UlQHYIhjMMTJ/FJUB2iIYzTEyfxSVAdgiGMwxMn8UlQHaIhjNMTJ/FJUB2CIYzDEyfxSVAdoiGM0xMnsB4gcoCGO0RAXaI
iCJQgHcIhjOMQFHKJgCcIBHuIYD3GyMPoBHuIYD3Eyvw3TAR7iGA9xKr8L2AEe4hgPceELWNgBAA9xjIc4lV8N4wAPcYyHOI83cAnHARziGA5xKr8W3wEc4hgOcR5vZN4fwCGO4RBX+ASWAzjEMRziPN3APNoBGuIYDXGBhihYx3SAhjhGQ1ygIUrD+R/QEMdoiAs0RMEs1AEa4hgNcYGGKAgzHKAhjtEQF2gIzuMcoCGO0RB3oiE4igM0xDEa4gINUZDqO0BDHKMhLtAQ3A0ADnEMhzhdmIcBD3GMh7jClhAHeIhjPMSFLSG4GwAe4hgPcTp/+JEDPMQxHuLClpBMNwBAxDEg4sKekEw3AETEMSLiwqYQBZNBB5CIY0jEmVIuApiIY0zEBSaicDYEmIhjTMQFJpLpBoCJOMZEnMkvDXQAijgGRVyAIpmRAEARx6CIC1AkMxIAKOIYFHGmwIYdgCKOQREXoEjGjwAUcQyKuABFNM7nABRxDIo4W2DDDlARx6iIC1RE46AKUBHHqIgLVETjoApQEceoiLMmT6cdwCKOYREXsIiGYMcBLOIYFnE2jIcQ7DgARhwDI86GuBDPigCNOIZGnGcdQmdeI/BEBkdcOAgd5/WAjjhGR1ygIxrPaQCPOIZHXMAjmV8A+IhjfMQFPqLxkAoAiWOAxJ0ACR4RASFxjJC4EyHBnREgEscQiQuIJDMgAUbiGCNxgZEY3BkBJHEMkrgASQzujICSOEZJXKAkBvclgEkcwyQuHIuOc3yASRzDJC4ci25wXwScxDFO4k7nouO+CDiJY5zEhU9lGfiNdwdAiWOgxAVQYnBfAqDEMVDiwseyMCpwAJU4hkrcKr9u0AFW4hgrcaevZeG+CFiJY6zErfILBx2AJY7BEhdgicF9GcASx2CJC9/LsrgvA1ziGC5x6/zSQQdwiWO4xAVcYnFXBrzEMV7iAi+xuCsDYOIYMHEBmFjclQExcYyYuBMxwV0RIBPHkIkLyMTirgiQiWPIxAVkYnFXBMzEMWbiAjOxuCsCaOIYNHEBmljcEwA1cYyarO6CI0JPXgFssmLYZBU2kMCi5QpgkxXDJquATRzsCSuATVYMm6wCNnHQk1cAm6wYNlkFbOKgJ68ANlkxbLIK2MRBT14BbLJi2GQVsImDnrwC2IT+7W+3N9uXT8P+ODx+8/I4fL758scfbzY/7fbHm9t/3txvw78peet1br78541yN1/+81+3N+t1+K9Y3YX/kVqE/xmPkg//Y3X4n/FL1/5/xu9uhf+x/vZ/3ZKV4Z/JPv+30eDN4+PD2+G4e47NWa3P5oznxzQ29DwcDpsPQ/LDVPTDVqcfdHdHv0zQL5P0yzT9MkO/zNIvO90+blBuMujpaffLYfu8fdrsY5NGR55sGl21ubG3l59fdr+8pI2JuDHV1tjL8eN+97p9iFsy53ZOP92Gn97e3O71uN29HBL77uJ2140+8bq9/zTsD9td8ltl5KSy8Ze+bn8efk0acZFDnPxgfXr7wp38WqqTG8j16e2PZ3gGf6DuYciLxu8VhOdF/zLuYQn/Q140rkTx/zPSwEbDP4nYbhv9eOvoBbn2xtDL0atzo841PtLDYXs4bl7S8UNHT5XGjTtB3UxSN6Nx485QN7PUzabHuqZu1tjvj8fNw0f/T/HjigwSck3Pa9XZJPNlG70D+nXjfqPTy7b0sk/eM5LbPr3jr69D+obW8RvSTc39tDkMb/unuJ3I6cl36C0ZGt0FvSXqBeOZ4eHlCPofR2MgvWR7R84vyfkn71wZegxtb/Lh4yZ1KhPPAuRU1FepR0lNQ/Yd9VWyQGsyV1BfdW3D7WgKe4Qy6ivq9O7XigYOGi/okcm1o4FjRQMHGUOeM36opMmYp2Gzn0+S67v48TT+rt3j8NPT7uHnxMmirtLYyvPr03BMPTUOIUbq2NbQy+G4f3s47pIZMu5m5EunB01DttBrcluawx3N4TR4jyf/nfyXwhRHowy9DB
sClxZLj3yIiRxCkKalyWAEf+F/+hTYiBNpjBsLOxriQ4k1sbltTT1ujpukDRu30eZyj8PwehiGxONWka8IPb2KtqnxcXi/eXs6plGQjqOgtrDl1M79OFze884ehY3y9GppOBM0zkt5slyu1tTZKUJeT+HC6V/GzzkH1zNt/kDmPe8eh9S26LfKk00rS7Y5so36x/qObJNkEv0P9Y/xg1dtJh0e9lsfRyRPX8SxCfUsR0nCyH1aWh8+H4eXx+ExcZTotVKzNG9Rvxc0EdAgrBQFFYL+x1FQQfOIJevG/bannkt+SA/TiTYPf78ZB7DtkHTdtYhH57Yn8H44Pnycve/xvJ1zU42d5NxUYpWKOjBlc4IehaC4RVI2pyibU/RwNIULhsZd0xg3vt8+HYdkmF/Hv+uuLbYJrfzjbfO0Pf46e1QqinMopROU0gnqwJJSOkUpnaJoSJNXGYqGTGPQ9X6/ex5HkhENRXFXPNCptuhzbIn+kYdxKm6u0RP2u+cP2/dJM3FwrNti1LGZ7fPmwyyyjKcV3TatjG39/XX4kLQTTy26sePtd8+vj+lPc3EzjZ45NvOSWhPP7brdAw6f0mbWcTNt4/7YzHH7PvlVq7u4nbbBemznl+Gn16SdONRrTB7fv708zEZ8Hb0tR53HNQbXH4aXYb8Z23zYvbzffkgHu3Uc8bS9wA9DEg2sVTyutD31D8Nx85Q49lrHrbQ98w/D8adfD8P+0/Yhib3WSSbT9tzjtmYD3Toex+/axpUP45QwS2tU5OmUngtKzwVNhZLSc0XpuaKJWFNcYWgiHr9A12rQPKuJh/DGYsnUUDoDx7+sMRP/MBxPcZd/5G+zcmJcmKR5hfJOsaLgi0JaRXmnchQGGnpIlJOM39trNG3mBqvIDcZzwnvaSZ6VjnNJevk0+YsVxbmUdCma/BUlXeNJdaffRUGla/bzYA9zTB0HUPSEacIWFHFLikcUTdiKyhfakCUUBhrX/IjiCCN9UnHQQg+IOoWgYFtOiQB1CkXRqTZkjaBk1DW6+W734Slxx1WcppCvWdPo6745WLON+qAVja/Rt3bc7dLYVURv0arGpva7fyS/Mk54qCNZ0xZrjI2h32gij3eNA9Y2yU1EHEw7iqadafuR25fXtzSHlXHZg4YSR2GsozqSaxwvvMD9w+6QqsQ9fVyK1dfU/euwvz/ufh7ScMDET2LVNmmHRg8PH4fnTfpY45KnaYu9QmPesPQ1i/ihTs/StgUqvtXd2/H17YiaNnHTjU/y8NOw2Q/7zdvxY5LTR23J0/hALiAo95JU/JJrGl1oNtFUbdIELEyrHx6mf4oLPXHgqhp7x8HnB0n8G8etqtFvD0+7h01a8bDxAHD66ad3SXMTlVfEVNh2lO5RCUZSXUAS8JFrmqhp+lKGMkGq6Gkqdev1hCgo4qE+aWybm/79wIhW9LtOP+PkoZSbU3AlKVyQ9LMUgShN84gmUGtoMLKU+1o51SepcE8Pzom2/vrz8Ov9ftgcdi/blw/3qB4aTUar6SVMLktxj2mb7J6eUiIcdRBFzm5NWz9+enr+OGweh33SgeU6bvL0+Cn2ceQq9JQl4XBlKfClWpKhAoqxbT3l6el5+3Ic9u83aZowfp4oGlLafOrp6RnMbyrqeeTJNDQIqoJLqtUpIpfKUpxKLmTIhYxt679PT8+nrIVNuCKecNsGp9lIIOKo0DXyk+fN/udHxs3jZQGNjXy+z84yJuZrq7b3NjYY5hbUYoypLfUmt2rzr7Hp48fty89jT503Ho/tp/dOYbygCUVSGC+pJyhyFk10W9PyCaMnAk68mwrTlgYAS6GxE+3PBzyYeEahkIvSVhrEJQX8kjxdTYM40VpNUNpQ1mspj7M0A9sp2KQJ19Fjcau23gCWpIh4HYNtDMFO7YBhd1wqG3Wstgzn1Nx+l+YT46rZqK22kfrUFguOYvx+emQ0Q1NeJmnikoSVFQ2pmmiyJsxrKN22lJZaCgUsea+led
2RAzga+1xj1WxM+NInEi+RWLUNW/MSflIGPllEUw3lg5L6lSRaoWgQ1pL+h0r0hrJ0SymxJRplqT9YmnMdPWhHEYJbNfrJLP9dxetnGtfzwPxeRS5C1JwiLOGmkJceBUFzRYBV06BjaNAxtuP1HDdp3dHo+D03dkpfI9ozWhPXLRor0LjYJOJqP/kxzeG0HknSgCbp3appMRvFxJoYlyYEYqiiYhvh3NnC2RIVY+In1xaR+euT8SL6rZZeurubeu/UaWj8bQwjd09PG5Zf3sW1LjGFkm2zUmgPFRRiLGsb84Hd6/Cy2abWxcVrN72ltok/tAeti5PVxpcUWpuVdOK42TWuO4GLzyLvprdK/Z8SK0nzgKRYQ5EraEn/Q0Vqo6cJgfyGBkdHg6OjocY14o9TiDavo8SzbuvrPrflCynndAqUVOKnvGp0dtb+vFUbV3/WbXXCfJAq4iB16qKNSdDrZr95Ho4sKROxW1gKB1wjxXnd7A8oRtJxp6ISMsW+goI7Scm2otFW0SStabg1NEmbVdvTi016+Pj2kqyD0TFjopSZwloxVSkoKFfUTxQNDHqqQNA0YBrnLm8XCt10vGCXHhHFcIJCCjkVTCjUUtR5NZVQDHVe0zjgBJt48BSzRiqrCxq3Bc1xkmYGRcGOoiBCU8ZuKIwyjTVKbxAYu3Tk+OQ0gpxGUBFBUmSnyGkURXaa8mxDU7NpTBu9Sel68pjAU9xLTk7vQtJPl5T3KD1FB/Q/VHE005IsAht2WkJLPcLSg3eiLZQMhu+Hw+vu5ZBO/nG1cXXX0a/SsSMa26ZZ4/RbT4+FclAq+4mpzkYvRlJdUeqpcjWtL6UnN7mancqv024BepYkYSirNJMXTguNyD+c7BjeKLFP/DFKUmiwEDRYCKr7SBrEFA0WimJGTVG1oVnVNNYaEpvmA1z0ZmnEEOSkgsJ9SaOYohFD0XSv6REb8lLTmHkHw8ZJaxZYx+yH3qWgYUPQm5PUdxT1HUV9R9O+F0N1DtOYGp6s2qVJVVwbI6cQNGgIqjxK8iBFg4ai7FjTRgNDtQvTmOGdDZq9vLiwQK4jpnSDSoaShjJFrqMoSdW0UtJQqm4aIxlv1fy9RdM4uYyYIkTKECX5siKXUZT2aFoeaWhkMI1B0Otmf9xung7H/bB5hoNYnM862eajJ8o8XxoXY6speacAgdxAkhtIcgNFVQFNfqmpzG8ovrATJyacZKc0g2YuR2951bj5aopk00whLmev2x7Jfni/Hw4JHFvFCznI7XTjpoJTezjDjpN+6u1imilpvJc0Bil6qopSDk1juaEJ0qxb7fqwPfC3LqOByTZmuPvhefdpAAtp4qUqjStZkWPH6T15npg8j9CHpBFR0QCkKJrQhJwMOZyZ6nSNa8hndcp4fbZtxACH4SXh9zG0pjFM0BgmqfQgaWRVE3mkLqWptm2IMJp1qymzJWJxLjUVAqdeGP5LiYqYzKXKp5zsJpIlp8W81KnVtOx1ikspMtD00vS0dJqYg6bZx0wrf0ndUqZi6fdbygEsTe2OhmdHP8aRhKMo0zV2mdNTm9WO4lWxq8ZMESIqEa89bOTnh93bPn2T8Y5M0bjkJcwriTHxClTyg+n9T6+dXvJqesnU68hdNQW9xkzvjerlFOxayhfseiqYTHClbYIMv2D78mGMJR7YWkoRb+VyjSuOS3NtzNsaOenhuN8+pDWcGJK6xiWeh7fX193+eLjfvD1ud/fzZTw2jgPWbeMbazRUXNJW43JT6yhDrdIS3vvxvfBJOsb2bt34LKnl13Hp9PH+YfPwcdZuXFBfN/ZKahfHE/EWJtcYT0xNftry/cQi3s7k1o09ldr7Zfjp/jBuzPuYthm7ZuOSzMOvh+OQ9P4YKFNnF1NnJ+AhKb5WU6I7DduU2Rrq46b13Xpj7rennXmzvT5xiKLaHPG4+ZAOtPHWOdcIkY7D8+u4bPxtn44F0ZhNcw2laYICN0mBm6TATdHko+nRaZpdDV
V07AQHpt19FINbmhWdbOvhPkNOVp9Ez5HGWgrjBQVTkuZlScGUojBey2mCnlYjkblUibJUlrAT97ubpmMq1zZuBURlh5iN0qMW06OmwoekpEuRyYqmJE3xi6GwxVC0YicmQCtwXOPWpxmwiJe4u8YlQ2MjfrhMHTf+nISbdro1LrhFM2Nc5qe3Lqa3TqhbUpqhKIRW5K2aaK8hJzUU1bnGbX2zcoSMpjCK9CmcFBQPSwowJMXDiiJETVbq86ovckkq/lgKAy1FNY4SeCfbRqrj7nXOp0W85WrVuKvsuDtungDjid924/Kn0BSAJXGt3zWS6tke3jhJmDYYk5e4xg+TvL3g5DNeqm0bm5oz6/hnTiCXQk/X3G5qnE0SY+om5IBE+SRlmooSEkWDoyYkaGhMNI0T/qdhv33/68Pu5WWYzYY2eWRkFllDxXNJeaKiuVhRv9BUTjfUHUxjzPDLfvP6OjzOBrsY3zjb3dYsco9Rh2v8JMdnzrTjMgsN59a0Gff5OaVBUVWjfv/fbm9et6/D0/ZluPnyx7/961//F3gkpv4="; \ No newline at end of file diff --git a/public/docs/classes/APIv1.html b/public/docs/classes/APIv1.html index dbc727d..97bab56 100644 --- a/public/docs/classes/APIv1.html +++ b/public/docs/classes/APIv1.html @@ -1,5 +1,5 @@ APIv1 | @themaximalist/llm.js
@themaximalist/llm.js
    Preparing search index...

    Class APIv1

    OpenAI API v1 Compatible Base Class

    -

    Hierarchy (View Summary)

    Index

    Constructors

    Hierarchy (View Summary)

    Index

    Constructors

    Properties

    Constructors

    Properties

    baseUrl: string
    extended: boolean
    json?: boolean
    max_thinking_tokens?: number
    max_tokens: number
    messages: Message[]
    model: string
    modelUsage: ModelUsage
    options: Options
    qualityFilter: QualityFilter
    service: string
    stream: boolean
    temperature?: number
    think: boolean
    tools?: Tool[]
    DEFAULT_BASE_URL: string = ""
    DEFAULT_MODEL: string = ""
    isBearerAuth: boolean = true
    isLocal: boolean = false
    KEY_REASONING_CONTENT: string = "reasoning_content"
    parsers: parsers = parsers
    service: string = "openai"

    Accessors

    • get apiKey(): undefined | null | string

      Returns undefined | null | string

    • get chatUrl(): string

      Returns string

    • get isLocal(): boolean

      Returns boolean

    • get llmHeaders(): Record<string, string>

      Returns Record<string, string>

    • get llmOptions(): Options

      Returns Options

    • get modelsUrl(): string

      Returns string

    • get parsers(): Parsers

      Returns Parsers

    Methods

    • Returns void

    • Parameters

      • content: string

      Returns void

    • Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns null | { input_tokens: any; output_tokens: any }

    • Returns Promise<void>

    • Parameters

      • data: any

      Returns string

    • Parameters

      • content: string

      Returns void

    • Parameters

      • content: string

      Returns void

    • Parameters

      • content: string

      Returns void

    • Returns Promise<boolean>

    +

    Constructors

    Properties

    baseUrl: string
    extended: boolean
    json?: boolean
    max_thinking_tokens?: number
    max_tokens: number
    messages: Message[]
    model: string
    modelUsage: ModelUsage
    options: Options
    qualityFilter: QualityFilter
    service: string
    stream: boolean
    temperature?: number
    think: boolean
    tools?: Tool[]
    DEFAULT_BASE_URL: string = ""
    DEFAULT_MODEL: string = ""
    isBearerAuth: boolean = true
    isLocal: boolean = false
    KEY_REASONING_CONTENT: string = "reasoning_content"
    parsers: parsers = parsers
    service: string = "openai"

    Accessors

    • get apiKey(): undefined | null | string

      Returns undefined | null | string

    • get chatUrl(): string

      Returns string

    • get isLocal(): boolean

      Returns boolean

    • get llmHeaders(): Record<string, string>

      Returns Record<string, string>

    • get llmOptions(): Options

      Returns Options

    • get modelsUrl(): string

      Returns string

    • get parsers(): Parsers

      Returns Parsers

    Methods

    • Returns void

    • Parameters

      • content: string

      Returns void

    • Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns string

    • Parameters

      • data: any

      Returns null | { input_tokens: any; output_tokens: any }

    • Returns Promise<void>

    • Parameters

      • data: any

      Returns string

    • Parameters

      • content: string

      Returns void

    • Parameters

      • content: string

      Returns void

    • Parameters

      Returns void

    • Returns Promise<boolean>

    diff --git a/public/docs/classes/Anthropic.html b/public/docs/classes/Anthropic.html index fafec35..f7a65f8 100644 --- a/public/docs/classes/Anthropic.html +++ b/public/docs/classes/Anthropic.html @@ -1,4 +1,4 @@ -Anthropic | @themaximalist/llm.js
    @themaximalist/llm.js
      Preparing search index...

      Class Anthropic

      Hierarchy (View Summary)

      Index

      Constructors

      constructor +Anthropic | @themaximalist/llm.js
      @themaximalist/llm.js
        Preparing search index...

        Class Anthropic

        Hierarchy (View Summary)

        Index

        Constructors

        Properties

        Constructors

        Properties

        baseUrl: string
        extended: boolean
        json?: boolean
        max_thinking_tokens?: number
        max_tokens: number
        messages: Message[]
        model: string
        modelUsage: ModelUsage
        options: Options
        qualityFilter: QualityFilter
        service: string
        stream: boolean
        temperature?: number
        think: boolean
        tools?: Tool[]
        API_VERSION: string = "2023-06-01"
        DEFAULT_BASE_URL: string = "https://api.anthropic.com/v1"
        DEFAULT_MODEL: string = "claude-opus-4-20250514"
        isBearerAuth: boolean = false
        isLocal: boolean = false
        parsers: parsers = parsers
        service: string = "anthropic"

        Accessors

        • get apiKey(): undefined | null | string

          Returns undefined | null | string

        • get chatUrl(): string

          Returns string

        • get isLocal(): boolean

          Returns boolean

        • get llmHeaders(): { "anthropic-version": string } & Record<string, string>

          Returns { "anthropic-version": string } & Record<string, string>

        • get llmOptions(): Options

          Returns Options

        • get modelsUrl(): string

          Returns string

        • get parsers(): Parsers

          Returns Parsers

        Methods

        • Returns void

        • Parameters

          • content: string

          Returns void

        • Returns string

        • Parameters

          • data: any

          Returns string

        • Parameters

          • chunk: any

          Returns string

        • Parameters

          • data: any

          Returns string

        • Parameters

          • chunk: any

          Returns string

        • Parameters

          • data: any

          Returns null | { input_tokens: number; output_tokens: number }

        • Returns Promise<void>

        • Parameters

          • data: any

          Returns string

        • Parameters

          • content: string

          Returns void

        • Parameters

          • content: string

          Returns void

        • Parameters

          • content: string

          Returns void

        • Returns Promise<boolean>

        +

        Constructors

        Properties

        baseUrl: string
        extended: boolean
        json?: boolean
        max_thinking_tokens?: number
        max_tokens: number
        messages: Message[]
        model: string
        modelUsage: ModelUsage
        options: Options
        qualityFilter: QualityFilter
        service: string
        stream: boolean
        temperature?: number
        think: boolean
        tools?: Tool[]
        API_VERSION: string = "2023-06-01"
        DEFAULT_BASE_URL: string = "https://api.anthropic.com/v1"
        DEFAULT_MODEL: string = "claude-opus-4-20250514"
        isBearerAuth: boolean = false
        isLocal: boolean = false
        parsers: parsers = parsers
        service: string = "anthropic"

        Accessors

        • get apiKey(): undefined | null | string

          Returns undefined | null | string

        • get chatUrl(): string

          Returns string

        • get isLocal(): boolean

          Returns boolean

        • get llmHeaders(): { "anthropic-version": string } & Record<string, string>

          Returns { "anthropic-version": string } & Record<string, string>

        • get llmOptions(): Options

          Returns Options

        • get modelsUrl(): string

          Returns string

        • get parsers(): Parsers

          Returns Parsers

        Methods

        • Returns void

        • Parameters

          • content: string

          Returns void

        • Returns string

        • Parameters

          • data: any

          Returns string

        • Parameters

          • chunk: any

          Returns string

        • Parameters

          • data: any

          Returns string

        • Parameters

          • chunk: any

          Returns string

        • Parameters

          • data: any

          Returns null | { input_tokens: number; output_tokens: number }

        • Returns Promise<void>

        • Parameters

          • data: any

          Returns string

        • Parameters

          • content: string

          Returns void

        • Parameters

          • content: string

          Returns void

        • Parameters

          Returns void

        • Returns Promise<boolean>

        diff --git a/public/docs/classes/Attachment.html b/public/docs/classes/Attachment.html new file mode 100644 index 0000000..56db947 --- /dev/null +++ b/public/docs/classes/Attachment.html @@ -0,0 +1,19 @@ +Attachment | @themaximalist/llm.js
        @themaximalist/llm.js
          Preparing search index...

          Class Attachment

          Index

          Constructors

          Properties

          contentType: string
          data: string

          Accessors

          • get content(): {
                source: | {
                    data?: undefined;
                    media_type?: undefined;
                    type: string;
                    url: string;
                }
                | { data: string; media_type: string; type: string; url?: undefined };
                type: AttachmentType;
            }

            Returns {
                source:
                    | {
                        data?: undefined;
                        media_type?: undefined;
                        type: string;
                        url: string;
                    }
                    | { data: string; media_type: string; type: string; url?: undefined };
                type: AttachmentType;
            }

          • get isDocument(): boolean

            Returns boolean

          • get isImage(): boolean

            Returns boolean

          • get source(): | {
                data?: undefined;
                media_type?: undefined;
                type: string;
                url: string;
            }
            | { data: string; media_type: string; type: string; url?: undefined }

            Returns
                | {
                    data?: undefined;
                    media_type?: undefined;
                    type: string;
                    url: string;
                }
                | { data: string; media_type: string; type: string; url?: undefined }

          Methods

          • Parameters

            • url: string

            Returns Attachment

          • Parameters

            • data: string

            Returns Attachment

          • Parameters

            • url: string

            Returns Attachment

          • Parameters

            • data: string

            Returns Attachment

          • Parameters

            • data: string

            Returns Attachment

          • Parameters

            • data: string

            Returns Attachment

          • Parameters

            • data: string

            Returns Attachment

          • Parameters

            • data: string

            Returns Attachment

          • Parameters

            • data: string

            Returns Attachment

          diff --git a/public/docs/classes/DeepSeek.html b/public/docs/classes/DeepSeek.html index faf4023..2bb6432 100644 --- a/public/docs/classes/DeepSeek.html +++ b/public/docs/classes/DeepSeek.html @@ -1,4 +1,4 @@ -DeepSeek | @themaximalist/llm.js
          @themaximalist/llm.js
            Preparing search index...

            Class DeepSeek

            Hierarchy (View Summary)

            Index

            Constructors

            constructor +DeepSeek | @themaximalist/llm.js
            @themaximalist/llm.js
              Preparing search index...

              Class DeepSeek

              Hierarchy (View Summary)

              Index

              Constructors

              Properties

              Constructors

              Properties

              baseUrl: string
              extended: boolean
              json?: boolean
              max_thinking_tokens?: number
              max_tokens: number
              messages: Message[]
              model: string
              modelUsage: ModelUsage
              options: Options
              qualityFilter: QualityFilter
              service: string
              stream: boolean
              temperature?: number
              think: boolean
              tools?: Tool[]
              DEFAULT_BASE_URL: string = "https://api.deepseek.com/v1/"
              DEFAULT_MODEL: string = "deepseek-chat"
              isBearerAuth: boolean = true
              isLocal: boolean = false
              KEY_REASONING_CONTENT: string = "reasoning_content"
              parsers: parsers = parsers
              service: string = "deepseek"

              Accessors

              • get apiKey(): undefined | null | string

                Returns undefined | null | string

              • get chatUrl(): string

                Returns string

              • get isLocal(): boolean

                Returns boolean

              • get llmHeaders(): Record<string, string>

                Returns Record<string, string>

              • get llmOptions(): Options

                Returns Options

              • get modelsUrl(): string

                Returns string

              • get parsers(): Parsers

                Returns Parsers

              Methods

              • Returns void

              • Parameters

                • content: string

                Returns void

              • Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns null | { input_tokens: any; output_tokens: any }

              • Returns Promise<void>

              • Parameters

                • data: any

                Returns string

              • Parameters

                • content: string

                Returns void

              • Parameters

                • content: string

                Returns void

              • Parameters

                • content: string

                Returns void

              • Returns Promise<boolean>

              +

              Constructors

              Properties

              baseUrl: string
              extended: boolean
              json?: boolean
              max_thinking_tokens?: number
              max_tokens: number
              messages: Message[]
              model: string
              modelUsage: ModelUsage
              options: Options
              qualityFilter: QualityFilter
              service: string
              stream: boolean
              temperature?: number
              think: boolean
              tools?: Tool[]
              DEFAULT_BASE_URL: string = "https://api.deepseek.com/v1/"
              DEFAULT_MODEL: string = "deepseek-chat"
              isBearerAuth: boolean = true
              isLocal: boolean = false
              KEY_REASONING_CONTENT: string = "reasoning_content"
              parsers: parsers = parsers
              service: string = "deepseek"

              Accessors

              • get apiKey(): undefined | null | string

                Returns undefined | null | string

              • get chatUrl(): string

                Returns string

              • get isLocal(): boolean

                Returns boolean

              • get llmHeaders(): Record<string, string>

                Returns Record<string, string>

              • get llmOptions(): Options

                Returns Options

              • get modelsUrl(): string

                Returns string

              • get parsers(): Parsers

                Returns Parsers

              Methods

              • Returns void

              • Parameters

                • content: string

                Returns void

              • Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns string

              • Parameters

                • data: any

                Returns null | { input_tokens: any; output_tokens: any }

              • Returns Promise<void>

              • Parameters

                • data: any

                Returns string

              • Parameters

                • content: string

                Returns void

              • Parameters

                • content: string

                Returns void

              • Parameters

                Returns void

              • Returns Promise<boolean>

              diff --git a/public/docs/classes/Google.html b/public/docs/classes/Google.html index b91f8e9..6289924 100644 --- a/public/docs/classes/Google.html +++ b/public/docs/classes/Google.html @@ -1,4 +1,4 @@ -Google | @themaximalist/llm.js
              @themaximalist/llm.js
                Preparing search index...

                Class Google

                Hierarchy (View Summary)

                Index

                Constructors

                constructor +Google | @themaximalist/llm.js
                @themaximalist/llm.js
                  Preparing search index...

                  Class Google

                  Hierarchy (View Summary)

                  Index

                  Constructors

                  Properties

                  Constructors

                  Properties

                  baseUrl: string
                  extended: boolean
                  json?: boolean
                  max_thinking_tokens?: number
                  max_tokens: number
                  messages: Message[]
                  model: string
                  modelUsage: ModelUsage
                  options: Options
                  qualityFilter: QualityFilter
                  service: string
                  stream: boolean
                  temperature?: number
                  think: boolean
                  tools?: Tool[]
                  DEFAULT_BASE_URL: string = "https://generativelanguage.googleapis.com/v1beta/"
                  DEFAULT_MODEL: string = "gemini-2.5-flash-preview-05-20"
                  isBearerAuth: boolean = false
                  isLocal: boolean = false
                  parsers: parsers = parsers
                  service: string = "google"

                  Accessors

                  • get apiKey(): undefined | null | string

                    Returns undefined | null | string

                  • get chatUrl(): string

                    Returns string

                  • get isLocal(): boolean

                    Returns boolean

                  • get llmHeaders(): Record<string, string>

                    Returns Record<string, string>

                  • get llmOptions(): Options

                    Returns Options

                  • get modelsUrl(): string

                    Returns string

                  • get parsers(): Parsers

                    Returns Parsers

                  Methods

                  • Returns void

                  • Parameters

                    • content: string

                    Returns void

                  • Returns string

                  • Parameters

                    • data: any

                    Returns string

                  • Parameters

                    • chunk: any

                    Returns string

                  • Parameters

                    • data: any

                    Returns string

                  • Parameters

                    • chunk: any

                    Returns string

                  • Parameters

                    • data: any

                    Returns null | { input_tokens: any; output_tokens: any }

                  • Returns Promise<void>

                  • Parameters

                    • data: any

                    Returns string

                  • Parameters

                    • content: string

                    Returns void

                  • Parameters

                    • content: string

                    Returns void

                  • Parameters

                    • content: string

                    Returns void

                  • Returns Promise<boolean>

                  +

                  Constructors

                  Properties

                  baseUrl: string
                  extended: boolean
                  json?: boolean
                  max_thinking_tokens?: number
                  max_tokens: number
                  messages: Message[]
                  model: string
                  modelUsage: ModelUsage
                  options: Options
                  qualityFilter: QualityFilter
                  service: string
                  stream: boolean
                  temperature?: number
                  think: boolean
                  tools?: Tool[]
                  DEFAULT_BASE_URL: string = "https://generativelanguage.googleapis.com/v1beta/"
                  DEFAULT_MODEL: string = "gemini-2.5-flash-preview-05-20"
                  isBearerAuth: boolean = false
                  isLocal: boolean = false
                  parsers: parsers = parsers
                  service: string = "google"

                  Accessors

                  • get apiKey(): undefined | null | string

                    Returns undefined | null | string

                  • get chatUrl(): string

                    Returns string

                  • get isLocal(): boolean

                    Returns boolean

                  • get llmHeaders(): Record<string, string>

                    Returns Record<string, string>

                  • get llmOptions(): Options

                    Returns Options

                  • get modelsUrl(): string

                    Returns string

                  • get parsers(): Parsers

                    Returns Parsers

                  Methods

                  • Returns void

                  • Parameters

                    • content: string

                    Returns void

                  • Returns string

                  • Parameters

                    • data: any

                    Returns string

                  • Parameters

                    • chunk: any

                    Returns string

                  • Parameters

                    • data: any

                    Returns string

                  • Parameters

                    • chunk: any

                    Returns string

                  • Parameters

                    • data: any

                    Returns null | { input_tokens: any; output_tokens: any }

                  • Returns Promise<void>

                  • Parameters

                    • data: any

                    Returns string

                  • Parameters

                    • content: string

                    Returns void

                  • Parameters

                    • content: string

                    Returns void

                  • Parameters

                    Returns void

                  • Returns Promise<boolean>

                  diff --git a/public/docs/classes/Groq.html b/public/docs/classes/Groq.html index 30b6dd1..24deb30 100644 --- a/public/docs/classes/Groq.html +++ b/public/docs/classes/Groq.html @@ -1,4 +1,4 @@ -Groq | @themaximalist/llm.js
                  @themaximalist/llm.js
                    Preparing search index...

                    Class Groq

                    Hierarchy (View Summary)

                    Index

                    Constructors

                    constructor +Groq | @themaximalist/llm.js
                    @themaximalist/llm.js
                      Preparing search index...

                      Class Groq

                      Hierarchy (View Summary)

                      Index

                      Constructors

                      Properties

                      Constructors

                      Properties

                      baseUrl: string
                      extended: boolean
                      json?: boolean
                      max_thinking_tokens?: number
                      max_tokens: number
                      messages: Message[]
                      model: string
                      modelUsage: ModelUsage
                      options: Options
                      qualityFilter: QualityFilter
                      service: string
                      stream: boolean
                      temperature?: number
                      think: boolean
                      tools?: Tool[]
                      DEFAULT_BASE_URL: string = "https://api.groq.com/openai/v1/"
                      DEFAULT_MODEL: string = "deepseek-r1-distill-llama-70b"
                      isBearerAuth: boolean = true
                      isLocal: boolean = false
                      KEY_REASONING_CONTENT: string = "reasoning"
                      parsers: parsers = parsers
                      service: string = "groq"

                      Accessors

                      • get apiKey(): undefined | null | string

                        Returns undefined | null | string

                      • get chatUrl(): string

                        Returns string

                      • get isLocal(): boolean

                        Returns boolean

                      • get llmHeaders(): Record<string, string>

                        Returns Record<string, string>

                      • get llmOptions(): Options

                        Returns Options

                      • get modelsUrl(): string

                        Returns string

                      • get parsers(): Parsers

                        Returns Parsers

                      Methods

                      • Returns void

                      • Parameters

                        • content: string

                        Returns void

                      • Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns null | { input_tokens: any; output_tokens: any }

                      • Returns Promise<void>

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • content: string

                        Returns void

                      • Parameters

                        • content: string

                        Returns void

                      • Parameters

                        • content: string

                        Returns void

                      • Returns Promise<boolean>

                      +

                      Constructors

                      Properties

                      baseUrl: string
                      extended: boolean
                      json?: boolean
                      max_thinking_tokens?: number
                      max_tokens: number
                      messages: Message[]
                      model: string
                      modelUsage: ModelUsage
                      options: Options
                      qualityFilter: QualityFilter
                      service: string
                      stream: boolean
                      temperature?: number
                      think: boolean
                      tools?: Tool[]
                      DEFAULT_BASE_URL: string = "https://api.groq.com/openai/v1/"
                      DEFAULT_MODEL: string = "deepseek-r1-distill-llama-70b"
                      isBearerAuth: boolean = true
                      isLocal: boolean = false
                      KEY_REASONING_CONTENT: string = "reasoning"
                      parsers: parsers = parsers
                      service: string = "groq"

                      Accessors

                      • get apiKey(): undefined | null | string

                        Returns undefined | null | string

                      • get chatUrl(): string

                        Returns string

                      • get isLocal(): boolean

                        Returns boolean

                      • get llmHeaders(): Record<string, string>

                        Returns Record<string, string>

                      • get llmOptions(): Options

                        Returns Options

                      • get modelsUrl(): string

                        Returns string

                      • get parsers(): Parsers

                        Returns Parsers

                      Methods

                      • Returns void

                      • Parameters

                        • content: string

                        Returns void

                      • Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • data: any

                        Returns null | { input_tokens: any; output_tokens: any }

                      • Returns Promise<void>

                      • Parameters

                        • data: any

                        Returns string

                      • Parameters

                        • content: string

                        Returns void

                      • Parameters

                        • content: string

                        Returns void

                      • Parameters

                        Returns void

                      • Returns Promise<boolean>

                      diff --git a/public/docs/classes/LLM.html b/public/docs/classes/LLM.html index c1fad83..6574a7f 100644 --- a/public/docs/classes/LLM.html +++ b/public/docs/classes/LLM.html @@ -1,5 +1,5 @@ LLM | @themaximalist/llm.js
                      @themaximalist/llm.js
                        Preparing search index...

                        Class LLM

                        LLM Base Class

                        -

                        Hierarchy (View Summary)

                        Index

                        Constructors

                        Hierarchy (View Summary)

                        Index

                        Constructors

                        Properties

                        Constructors

                        Properties

                        baseUrl: string
                        extended: boolean
                        json?: boolean
                        max_thinking_tokens?: number
                        max_tokens: number
                        messages: Message[]
                        model: string
                        modelUsage: ModelUsage
                        options: Options
                        qualityFilter: QualityFilter
                        service: string
                        stream: boolean
                        temperature?: number
                        think: boolean
                        tools?: Tool[]
                        DEFAULT_BASE_URL: string
                        DEFAULT_MODEL: string
                        isBearerAuth: boolean = false
                        isLocal: boolean = false
                        parsers: parsers = parsers
                        service: string

                        Accessors

                        • get apiKey(): undefined | null | string

                          Returns undefined | null | string

                        • get chatUrl(): string

                          Returns string

                        • get isLocal(): boolean

                          Returns boolean

                        • get llmHeaders(): Record<string, string>

                          Returns Record<string, string>

                        • get modelsUrl(): string

                          Returns string

                        Methods

                        • Returns void

                        • Parameters

                          Returns void

                        • Parameters

                          • content: string

                          Returns void

                        • Parameters

                          • input: string
                          • Optionaloptions: Options

                          Returns Promise<
                              | string
                              | Response
                              | PartialStreamResponse
                              | AsyncGenerator<string, any, any>,
                          >

                        • Returns Promise<Model[]>

                        • Parameters

                          Returns boolean

                        • Parameters

                          Returns string

                        • Returns string

                        • Returns Promise<Model[]>

                        • Parameters

                          • data: any

                          Returns string

                        • Parameters

                          • chunk: any

                          Returns string

                        • Parameters

                          • model: any

                          Returns Model

                        • Parameters

                          • data: any

                          Returns string

                        • Parameters

                          • chunk: any

                          Returns string

                        • Parameters

                          • data: any

                          Returns ToolCall[]

                        • Parameters

                          • chunk: any

                          Returns ToolCall[]

                        • Returns Promise<void>

                        • Parameters

                          • data: any

                          Returns string

                        • Parameters

                          • content: string

                          Returns void

                        • Parameters

                          • content: string

                          Returns void

                        • Parameters

                          Returns void

                        • Parameters

                          • content: string

                          Returns void

                        • Returns Promise<boolean>

                        +

                        Constructors

                        Properties

                        baseUrl: string
                        extended: boolean
                        json?: boolean
                        max_thinking_tokens?: number
                        max_tokens: number
                        messages: Message[]
                        model: string
                        modelUsage: ModelUsage
                        options: Options
                        qualityFilter: QualityFilter
                        service: string
                        stream: boolean
                        temperature?: number
                        think: boolean
                        tools?: Tool[]
                        DEFAULT_BASE_URL: string
                        DEFAULT_MODEL: string
                        isBearerAuth: boolean = false
                        isLocal: boolean = false
                        parsers: parsers = parsers
                        service: string

                        Accessors

                        • get apiKey(): undefined | null | string

                          Returns undefined | null | string

                        • get chatUrl(): string

                          Returns string

                        • get isLocal(): boolean

                          Returns boolean

                        • get llmHeaders(): Record<string, string>

                          Returns Record<string, string>

                        • get modelsUrl(): string

                          Returns string

                        Methods

                        • Returns void

                        • Parameters

                          Returns void

                        • Parameters

                          • content: string

                          Returns void

                        • Parameters

                          • input: string
                          • Optionaloptions: Options

                          Returns Promise<
                              | string
                              | Response
                              | PartialStreamResponse
                              | AsyncGenerator<string, any, any>,
                          >

                        • Returns Promise<Model[]>

                        • Parameters

                          Returns boolean

                        • Parameters

                          Returns string

                        • Returns string

                        • Returns Promise<Model[]>

                        • Parameters

                          • data: any

                          Returns string

                        • Parameters

                          • chunk: any

                          Returns string

                        • Parameters

                          • model: any

                          Returns Model

                        • Parameters

                          • data: any

                          Returns string

                        • Parameters

                          • chunk: any

                          Returns string

                        • Parameters

                          • data: any

                          Returns ToolCall[]

                        • Parameters

                          • chunk: any

                          Returns ToolCall[]

                        • Returns Promise<void>

                        • Parameters

                          • data: any

                          Returns string

                        • Parameters

                          • content: string

                          Returns void

                        • Parameters

                          • content: string

                          Returns void

                        • Parameters

                          Returns void

                        • Parameters

                          Returns void

                        • Returns Promise<boolean>

                        diff --git a/public/docs/classes/Ollama.html b/public/docs/classes/Ollama.html index 646c56f..7064fbf 100644 --- a/public/docs/classes/Ollama.html +++ b/public/docs/classes/Ollama.html @@ -1,4 +1,4 @@ -Ollama | @themaximalist/llm.js
                        @themaximalist/llm.js
                          Preparing search index...

                          Class Ollama

                          Hierarchy (View Summary)

                          Index

                          Constructors

                          constructor +Ollama | @themaximalist/llm.js
                          @themaximalist/llm.js
                            Preparing search index...

                            Class Ollama

                            Hierarchy (View Summary)

                            Index

                            Constructors

                            Properties

                            Constructors

                            Properties

                            baseUrl: string
                            extended: boolean
                            json?: boolean
                            max_thinking_tokens?: number
                            max_tokens: number
                            messages: Message[]
                            model: string
                            modelUsage: ModelUsage
                            options: Options
                            qualityFilter: QualityFilter
                            service: string
                            stream: boolean
                            temperature?: number
                            think: boolean
                            tools?: Tool[]
                            DEFAULT_BASE_URL: string = "http://localhost:11434"
                            DEFAULT_MODEL: string = "gemma3:4b"
                            isBearerAuth: boolean = false
                            isLocal: boolean = true
                            parsers: parsers = parsers
                            service: string = "ollama"

                            Accessors

                            • get apiKey(): undefined | null | string

                              Returns undefined | null | string

                            • get chatUrl(): string

                              Returns string

                            • get isLocal(): boolean

                              Returns boolean

                            • get llmHeaders(): Record<string, string>

                              Returns Record<string, string>

                            • get llmOptions(): Options

                              Returns Options

                            • get modelsUrl(): string

                              Returns string

                            • get modelUrl(): string

                              Returns string

                            • get parsers(): Parsers

                              Returns Parsers

                            Methods

                            • Returns void

                            • Parameters

                              • content: string

                              Returns void

                            • Parameters

                              • model: string

                              Returns Promise<any>

                            • Returns string

                            • Parameters

                              • data: any

                              Returns string

                            • Parameters

                              • chunk: any

                              Returns string

                            • Parameters

                              • data: any

                              Returns string

                            • Parameters

                              • chunk: any

                              Returns string

                            • Parameters

                              • usage: any

                              Returns null | { input_tokens: any; output_tokens: any }

                            • Returns Promise<void>

                            • Parameters

                              • data: any

                              Returns string

                            • Parameters

                              • content: string

                              Returns void

                            • Parameters

                              • content: string

                              Returns void

                            • Parameters

                              • content: string

                              Returns void

                            • Returns Promise<boolean>

                            +

                            Constructors

                            Properties

                            baseUrl: string
                            extended: boolean
                            json?: boolean
                            max_thinking_tokens?: number
                            max_tokens: number
                            messages: Message[]
                            model: string
                            modelUsage: ModelUsage
                            options: Options
                            qualityFilter: QualityFilter
                            service: string
                            stream: boolean
                            temperature?: number
                            think: boolean
                            tools?: Tool[]
                            DEFAULT_BASE_URL: string = "http://localhost:11434"
                            DEFAULT_MODEL: string = "gemma3:4b"
                            isBearerAuth: boolean = false
                            isLocal: boolean = true
                            parsers: parsers = parsers
                            service: string = "ollama"

                            Accessors

                            • get apiKey(): undefined | null | string

                              Returns undefined | null | string

                            • get chatUrl(): string

                              Returns string

                            • get isLocal(): boolean

                              Returns boolean

                            • get llmHeaders(): Record<string, string>

                              Returns Record<string, string>

                            • get llmOptions(): Options

                              Returns Options

                            • get modelsUrl(): string

                              Returns string

                            • get modelUrl(): string

                              Returns string

                            • get parsers(): Parsers

                              Returns Parsers

                            Methods

                            • Returns void

                            • Parameters

                              • content: string

                              Returns void

                            • Parameters

                              • model: string

                              Returns Promise<any>

                            • Returns string

                            • Parameters

                              • data: any

                              Returns string

                            • Parameters

                              • chunk: any

                              Returns string

                            • Parameters

                              • data: any

                              Returns string

                            • Parameters

                              • chunk: any

                              Returns string

                            • Parameters

                              • usage: any

                              Returns null | { input_tokens: any; output_tokens: any }

                            • Returns Promise<void>

                            • Parameters

                              • data: any

                              Returns string

                            • Parameters

                              • content: string

                              Returns void

                            • Parameters

                              • content: string

                              Returns void

                            • Parameters

                              Returns void

                            • Returns Promise<boolean>

                            diff --git a/public/docs/classes/OpenAI.html b/public/docs/classes/OpenAI.html index 0930be3..610a734 100644 --- a/public/docs/classes/OpenAI.html +++ b/public/docs/classes/OpenAI.html @@ -1,4 +1,4 @@ -OpenAI | @themaximalist/llm.js
                            @themaximalist/llm.js
                              Preparing search index...

                              Class OpenAI

                              Hierarchy (View Summary)

                              Index

                              Constructors

                              constructor +OpenAI | @themaximalist/llm.js
                              @themaximalist/llm.js
                                Preparing search index...

                                Class OpenAI

                                Hierarchy (View Summary)

                                Index

                                Constructors

                                Properties

                                Constructors

                                Properties

                                baseUrl: string
                                extended: boolean
                                json?: boolean
                                max_thinking_tokens?: number
                                max_tokens: number
                                messages: Message[]
                                model: string
                                modelUsage: ModelUsage
                                options: Options
                                qualityFilter: QualityFilter
                                service: string
                                stream: boolean
                                temperature?: number
                                think: boolean
                                tools?: Tool[]
                                DEFAULT_BASE_URL: string = "https://api.openai.com/v1"
                                DEFAULT_MODEL: string = "gpt-4o-mini"
                                isBearerAuth: boolean = true
                                isLocal: boolean = false
                                parsers: parsers = parsers
                                service: string = "openai"

                                Accessors

                                • get apiKey(): undefined | null | string

                                  Returns undefined | null | string

                                • get chatUrl(): string

                                  Returns string

                                • get isLocal(): boolean

                                  Returns boolean

                                • get llmHeaders(): Record<string, string>

                                  Returns Record<string, string>

                                • get llmOptions(): Options

                                  Returns Options

                                • get modelsUrl(): string

                                  Returns string

                                • get parsers(): Parsers

                                  Returns Parsers

                                Methods

                                • Returns void

                                • Parameters

                                  • content: string

                                  Returns void

                                • Returns string

                                • Parameters

                                  • data: any

                                  Returns string

                                • Parameters

                                  • chunk: any

                                  Returns string

                                • Parameters

                                  • data: any

                                  Returns string

                                • Parameters

                                  • chunk: any

                                  Returns string

                                • Parameters

                                  • data: any

                                  Returns null | { input_tokens: any; output_tokens: any }

                                • Returns Promise<void>

                                • Parameters

                                  • data: any

                                  Returns string

                                • Parameters

                                  • content: string

                                  Returns void

                                • Parameters

                                  • content: string

                                  Returns void

                                • Parameters

                                  • content: string

                                  Returns void

                                • Returns Promise<boolean>

                                +

                                Constructors

                                Properties

                                baseUrl: string
                                extended: boolean
                                json?: boolean
                                max_thinking_tokens?: number
                                max_tokens: number
                                messages: Message[]
                                model: string
                                modelUsage: ModelUsage
                                options: Options
                                qualityFilter: QualityFilter
                                service: string
                                stream: boolean
                                temperature?: number
                                think: boolean
                                tools?: Tool[]
                                DEFAULT_BASE_URL: string = "https://api.openai.com/v1"
                                DEFAULT_MODEL: string = "gpt-4o-mini"
                                isBearerAuth: boolean = true
                                isLocal: boolean = false
                                parsers: parsers = parsers
                                service: string = "openai"

                                Accessors

                                • get apiKey(): undefined | null | string

                                  Returns undefined | null | string

                                • get chatUrl(): string

                                  Returns string

                                • get isLocal(): boolean

                                  Returns boolean

                                • get llmHeaders(): Record<string, string>

                                  Returns Record<string, string>

                                • get llmOptions(): Options

                                  Returns Options

                                • get modelsUrl(): string

                                  Returns string

                                • get parsers(): Parsers

                                  Returns Parsers

                                Methods

                                • Returns void

                                • Parameters

                                  • content: string

                                  Returns void

                                • Returns string

                                • Parameters

                                  • data: any

                                  Returns string

                                • Parameters

                                  • chunk: any

                                  Returns string

                                • Parameters

                                  • data: any

                                  Returns string

                                • Parameters

                                  • chunk: any

                                  Returns string

                                • Parameters

                                  • data: any

                                  Returns null | { input_tokens: any; output_tokens: any }

                                • Returns Promise<void>

                                • Parameters

                                  • data: any

                                  Returns string

                                • Parameters

                                  • content: string

                                  Returns void

                                • Parameters

                                  • content: string

                                  Returns void

                                • Parameters

                                  Returns void

                                • Returns Promise<boolean>

                                diff --git a/public/docs/classes/xAI.html b/public/docs/classes/xAI.html index 2d9c3fc..db9d9b6 100644 --- a/public/docs/classes/xAI.html +++ b/public/docs/classes/xAI.html @@ -1,4 +1,4 @@ -xAI | @themaximalist/llm.js
                                @themaximalist/llm.js
                                  Preparing search index...

                                  Class xAI

                                  Hierarchy (View Summary)

                                  Index

                                  Constructors

                                  constructor +xAI | @themaximalist/llm.js
                                  @themaximalist/llm.js
                                    Preparing search index...

                                    Class xAI

                                    Hierarchy (View Summary)

                                    Index

                                    Constructors

                                    Properties

                                    Constructors

                                    Properties

                                    baseUrl: string
                                    extended: boolean
                                    json?: boolean
                                    max_thinking_tokens?: number
                                    max_tokens: number
                                    messages: Message[]
                                    model: string
                                    modelUsage: ModelUsage
                                    options: Options
                                    qualityFilter: QualityFilter
                                    service: string
                                    stream: boolean
                                    temperature?: number
                                    think: boolean
                                    tools?: Tool[]
                                    DEFAULT_BASE_URL: string = "https://api.x.ai/v1/"
                                    DEFAULT_MODEL: string = "grok-3"
                                    isBearerAuth: boolean = true
                                    isLocal: boolean = false
                                    KEY_REASONING_CONTENT: string = "reasoning_content"
                                    parsers: parsers = parsers
                                    service: string = "xai"

                                    Accessors

                                    • get apiKey(): undefined | null | string

                                      Returns undefined | null | string

                                    • get chatUrl(): string

                                      Returns string

                                    • get isLocal(): boolean

                                      Returns boolean

                                    • get llmHeaders(): Record<string, string>

                                      Returns Record<string, string>

                                    • get llmOptions(): Options

                                      Returns Options

                                    • get modelsUrl(): string

                                      Returns string

                                    • get parsers(): Parsers

                                      Returns Parsers

                                    Methods

                                    • Returns void

                                    • Parameters

                                      • content: string

                                      Returns void

                                    • Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns null | { input_tokens: any; output_tokens: any }

                                    • Returns Promise<void>

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • content: string

                                      Returns void

                                    • Parameters

                                      • content: string

                                      Returns void

                                    • Parameters

                                      • content: string

                                      Returns void

                                    • Returns Promise<boolean>

                                    +

                                    Constructors

                                    Properties

                                    baseUrl: string
                                    extended: boolean
                                    json?: boolean
                                    max_thinking_tokens?: number
                                    max_tokens: number
                                    messages: Message[]
                                    model: string
                                    modelUsage: ModelUsage
                                    options: Options
                                    qualityFilter: QualityFilter
                                    service: string
                                    stream: boolean
                                    temperature?: number
                                    think: boolean
                                    tools?: Tool[]
                                    DEFAULT_BASE_URL: string = "https://api.x.ai/v1/"
                                    DEFAULT_MODEL: string = "grok-3"
                                    isBearerAuth: boolean = true
                                    isLocal: boolean = false
                                    KEY_REASONING_CONTENT: string = "reasoning_content"
                                    parsers: parsers = parsers
                                    service: string = "xai"

                                    Accessors

                                    • get apiKey(): undefined | null | string

                                      Returns undefined | null | string

                                    • get chatUrl(): string

                                      Returns string

                                    • get isLocal(): boolean

                                      Returns boolean

                                    • get llmHeaders(): Record<string, string>

                                      Returns Record<string, string>

                                    • get llmOptions(): Options

                                      Returns Options

                                    • get modelsUrl(): string

                                      Returns string

                                    • get parsers(): Parsers

                                      Returns Parsers

                                    Methods

                                    • Returns void

                                    • Parameters

                                      • content: string

                                      Returns void

                                    • Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • data: any

                                      Returns null | { input_tokens: any; output_tokens: any }

                                    • Returns Promise<void>

                                    • Parameters

                                      • data: any

                                      Returns string

                                    • Parameters

                                      • content: string

                                      Returns void

                                    • Parameters

                                      • content: string

                                      Returns void

                                    • Parameters

                                      Returns void

                                    • Returns Promise<boolean>

                                    diff --git a/public/docs/hierarchy.html b/public/docs/hierarchy.html index df51539..a394b76 100644 --- a/public/docs/hierarchy.html +++ b/public/docs/hierarchy.html @@ -1 +1 @@ -@themaximalist/llm.js
                                    @themaximalist/llm.js
                                      Preparing search index...
                                      +@themaximalist/llm.js
                                      @themaximalist/llm.js
                                        Preparing search index...
                                        diff --git a/public/docs/interfaces/AnthropicOptions.html b/public/docs/interfaces/AnthropicOptions.html index ec6c8ab..f2e2568 100644 --- a/public/docs/interfaces/AnthropicOptions.html +++ b/public/docs/interfaces/AnthropicOptions.html @@ -1,4 +1,5 @@ -AnthropicOptions | @themaximalist/llm.js
                                        @themaximalist/llm.js
                                          Preparing search index...

                                          Interface AnthropicOptions

                                          interface AnthropicOptions {
                                              apiKey?: string;
                                              baseUrl?: string;
                                              extended?: boolean;
                                              json?: boolean;
                                              max_thinking_tokens?: number;
                                              max_tokens?: number;
                                              messages?: Message[];
                                              model?: string;
                                              parser?: ParserResponse;
                                              qualityFilter?: QualityFilter;
                                              service?: string;
                                              stream?: boolean;
                                              temperature?: number;
                                              think?: boolean;
                                              thinking: { budget_tokens: number; type: "enabled" | "disabled" };
                                              tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                          }

                                          Hierarchy (View Summary)

                                          Index

                                          Properties

                                          apiKey? +AnthropicOptions | @themaximalist/llm.js
                                          @themaximalist/llm.js
                                            Preparing search index...

                                            Interface AnthropicOptions

                                            interface AnthropicOptions {
                                                apiKey?: string;
                                                attachments?: Attachment[];
                                                baseUrl?: string;
                                                extended?: boolean;
                                                json?: boolean;
                                                max_thinking_tokens?: number;
                                                max_tokens?: number;
                                                messages?: Message[];
                                                model?: string;
                                                parser?: ParserResponse;
                                                qualityFilter?: QualityFilter;
                                                service?: string;
                                                stream?: boolean;
                                                temperature?: number;
                                                think?: boolean;
                                                thinking: { budget_tokens: number; type: "enabled" | "disabled" };
                                                tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                            }

                                            Hierarchy (View Summary)

                                            Index

                                            Properties

                                            Properties

                                            apiKey?: string

                                            API Key for the service, Usage.local services do not need an API key

                                            -
                                            baseUrl?: string

                                            Base URL for the service

                                            -
                                            extended?: boolean

                                            Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                            -
                                            json?: boolean

                                            Enables JSON mode in LLM if available and parses output with parsers.json

                                            -
                                            max_thinking_tokens?: number

                                            Maximum number of tokens to use when thinking is enabled

                                            -
                                            max_tokens?: number

                                            Maximum number of tokens to generate

                                            -
                                            messages?: Message[]

                                            Messages to send to the model

                                            -
                                            model?: string

                                            Model to use, defaults to Ollama.DEFAULT_MODEL model

                                            -

                                            Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                            -
                                            qualityFilter?: QualityFilter

                                            Quality filter when dealing with model usage

                                            -
                                            service?: string

                                            Service to use, defaults to Ollama

                                            -
                                            stream?: boolean

                                            Enables streaming mode

                                            -
                                            temperature?: number

                                            Temperature for the model

                                            -
                                            think?: boolean

                                            Enables thinking mode

                                            -
                                            thinking: { budget_tokens: number; type: "enabled" | "disabled" }
                                            tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                            Tools available for the model to use, will enable Options.extended

                                            -
                                            +
                                            attachments?: Attachment[]

                                            Attachments to send to the model

                                            +
                                            baseUrl?: string

                                            Base URL for the service

                                            +
                                            extended?: boolean

                                            Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                            +
                                            json?: boolean

                                            Enables JSON mode in LLM if available and parses output with parsers.json

                                            +
                                            max_thinking_tokens?: number

                                            Maximum number of tokens to use when thinking is enabled

                                            +
                                            max_tokens?: number

                                            Maximum number of tokens to generate

                                            +
                                            messages?: Message[]

                                            Messages to send to the model

                                            +
                                            model?: string

                                            Model to use, defaults to Ollama.DEFAULT_MODEL model

                                            +

                                            Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                            +
                                            qualityFilter?: QualityFilter

                                            Quality filter when dealing with model usage

                                            +
                                            service?: string

                                            Service to use, defaults to Ollama

                                            +
                                            stream?: boolean

                                            Enables streaming mode

                                            +
                                            temperature?: number

                                            Temperature for the model

                                            +
                                            think?: boolean

                                            Enables thinking mode

                                            +
                                            thinking: { budget_tokens: number; type: "enabled" | "disabled" }
                                            tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                            Tools available for the model to use, will enable Options.extended

                                            +
                                            diff --git a/public/docs/interfaces/GoogleOptions.html b/public/docs/interfaces/GoogleOptions.html index 192a3b0..1031efc 100644 --- a/public/docs/interfaces/GoogleOptions.html +++ b/public/docs/interfaces/GoogleOptions.html @@ -1,4 +1,5 @@ -GoogleOptions | @themaximalist/llm.js
                                            @themaximalist/llm.js
                                              Preparing search index...

                                              Interface GoogleOptions

                                              interface GoogleOptions {
                                                  apiKey?: string;
                                                  baseUrl?: string;
                                                  contents?: { parts: { text: string }[] }[];
                                                  extended?: boolean;
                                                  generationConfig?: {
                                                      maxOutputTokens?: number;
                                                      temperature?: number;
                                                      thinkingConfig?: { includeThoughts: boolean };
                                                  };
                                                  json?: boolean;
                                                  max_thinking_tokens?: number;
                                                  max_tokens?: number;
                                                  messages?: Message[];
                                                  model?: string;
                                                  parser?: ParserResponse;
                                                  qualityFilter?: QualityFilter;
                                                  service?: string;
                                                  stream?: boolean;
                                                  system_instruction?: { parts: { text: string }[] };
                                                  temperature?: number;
                                                  think?: boolean;
                                                  tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                              }

                                              Hierarchy (View Summary)

                                              Index

                                              Properties

                                              apiKey? +GoogleOptions | @themaximalist/llm.js
                                              @themaximalist/llm.js
                                                Preparing search index...

                                                Interface GoogleOptions

                                                interface GoogleOptions {
                                                    apiKey?: string;
                                                    attachments?: Attachment[];
                                                    baseUrl?: string;
                                                    contents?: { parts: { text: string }[] }[];
                                                    extended?: boolean;
                                                    generationConfig?: {
                                                        maxOutputTokens?: number;
                                                        temperature?: number;
                                                        thinkingConfig?: { includeThoughts: boolean };
                                                    };
                                                    json?: boolean;
                                                    max_thinking_tokens?: number;
                                                    max_tokens?: number;
                                                    messages?: Message[];
                                                    model?: string;
                                                    parser?: ParserResponse;
                                                    qualityFilter?: QualityFilter;
                                                    service?: string;
                                                    stream?: boolean;
                                                    system_instruction?: { parts: { text: string }[] };
                                                    temperature?: number;
                                                    think?: boolean;
                                                    tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                }

                                                Hierarchy (View Summary)

                                                Index

                                                Properties

                                                Properties

                                                apiKey?: string

                                                API Key for the service, Usage.local services do not need an API key

                                                -
                                                baseUrl?: string

                                                Base URL for the service

                                                -
                                                contents?: { parts: { text: string }[] }[]
                                                extended?: boolean

                                                Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                -
                                                generationConfig?: {
                                                    maxOutputTokens?: number;
                                                    temperature?: number;
                                                    thinkingConfig?: { includeThoughts: boolean };
                                                }
                                                json?: boolean

                                                Enables JSON mode in LLM if available and parses output with parsers.json

                                                -
                                                max_thinking_tokens?: number

                                                Maximum number of tokens to use when thinking is enabled

                                                -
                                                max_tokens?: number

                                                Maximum number of tokens to generate

                                                -
                                                messages?: Message[]

                                                Messages to send to the model

                                                -
                                                model?: string

                                                Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                -

                                                Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                -
                                                qualityFilter?: QualityFilter

                                                Quality filter when dealing with model usage

                                                -
                                                service?: string

                                                Service to use, defaults to Ollama

                                                -
                                                stream?: boolean

                                                Enables streaming mode

                                                -
                                                system_instruction?: { parts: { text: string }[] }
                                                temperature?: number

                                                Temperature for the model

                                                -
                                                think?: boolean

                                                Enables thinking mode

                                                -
                                                tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                Tools available for the model to use, will enable Options.extended

                                                -
                                                +
                                                attachments?: Attachment[]

                                                Attachments to send to the model

                                                +
                                                baseUrl?: string

                                                Base URL for the service

                                                +
                                                contents?: { parts: { text: string }[] }[]
                                                extended?: boolean

                                                Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                +
                                                generationConfig?: {
                                                    maxOutputTokens?: number;
                                                    temperature?: number;
                                                    thinkingConfig?: { includeThoughts: boolean };
                                                }
                                                json?: boolean

                                                Enables JSON mode in LLM if available and parses output with parsers.json

                                                +
                                                max_thinking_tokens?: number

                                                Maximum number of tokens to use when thinking is enabled

                                                +
                                                max_tokens?: number

                                                Maximum number of tokens to generate

                                                +
                                                messages?: Message[]

                                                Messages to send to the model

                                                +
                                                model?: string

                                                Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                +

                                                Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                +
                                                qualityFilter?: QualityFilter

                                                Quality filter when dealing with model usage

                                                +
                                                service?: string

                                                Service to use, defaults to Ollama

                                                +
                                                stream?: boolean

                                                Enables streaming mode

                                                +
                                                system_instruction?: { parts: { text: string }[] }
                                                temperature?: number

                                                Temperature for the model

                                                +
                                                think?: boolean

                                                Enables thinking mode

                                                +
                                                tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                Tools available for the model to use, will enable Options.extended

                                                +
                                                diff --git a/public/docs/interfaces/InputOutputTokens.html b/public/docs/interfaces/InputOutputTokens.html index dc5dd79..342932e 100644 --- a/public/docs/interfaces/InputOutputTokens.html +++ b/public/docs/interfaces/InputOutputTokens.html @@ -1,3 +1,3 @@ -InputOutputTokens | @themaximalist/llm.js
                                                @themaximalist/llm.js
                                                  Preparing search index...

                                                  Interface InputOutputTokens

                                                  interface InputOutputTokens {
                                                      input_tokens: number;
                                                      output_tokens: number;
                                                  }

                                                  Hierarchy (View Summary)

                                                  Index

                                                  Properties

                                                  input_tokens +InputOutputTokens | @themaximalist/llm.js
                                                  @themaximalist/llm.js
                                                    Preparing search index...

                                                    Interface InputOutputTokens

                                                    interface InputOutputTokens {
                                                        input_tokens: number;
                                                        output_tokens: number;
                                                    }

                                                    Hierarchy (View Summary)

                                                    Index

                                                    Properties

                                                    input_tokens: number
                                                    output_tokens: number
                                                    +

                                                    Properties

                                                    input_tokens: number
                                                    output_tokens: number
                                                    diff --git a/public/docs/interfaces/LLMInterface.html b/public/docs/interfaces/LLMInterface.html index cb0d44f..aa61ce5 100644 --- a/public/docs/interfaces/LLMInterface.html +++ b/public/docs/interfaces/LLMInterface.html @@ -1,6 +1,7 @@ -LLMInterface | @themaximalist/llm.js
                                                    @themaximalist/llm.js
                                                      Preparing search index...

                                                      Interface LLMInterface

                                                      interface LLMInterface {
                                                          new LLMInterface(input: Input, options?: Options): LLMServices;
                                                          new LLMInterface(options: Options): LLMServices;
                                                          new LLMInterface(): LLMServices;
                                                          Anthropic: typeof Anthropic;
                                                          APIv1: typeof APIv1;
                                                          DeepSeek: typeof DeepSeek;
                                                          Google: typeof Google;
                                                          Groq: typeof Groq;
                                                          LLM: typeof LLM;
                                                          ModelUsage: typeof ModelUsage;
                                                          Ollama: typeof Ollama;
                                                          OpenAI: typeof OpenAI;
                                                          parsers: parsers;
                                                          services: any[];
                                                          xAI: typeof xAI;
                                                          register(LLMClass: typeof LLM): void;
                                                          unregister(LLMClass: typeof LLM): void;
                                                          (input: Input, options?: Options): Promise<string>;
                                                          (options: Options): Promise<string>;
                                                      }
                                                      • Parameters

                                                        Returns Promise<string>

                                                      • Parameters

                                                        Returns Promise<string>

                                                      Index

                                                      Constructors

                                                      constructor +LLMInterface | @themaximalist/llm.js
                                                      @themaximalist/llm.js
                                                        Preparing search index...

                                                        Interface LLMInterface

                                                        interface LLMInterface {
                                                            new LLMInterface(input: Input, options?: Options): LLMServices;
                                                            new LLMInterface(options: Options): LLMServices;
                                                            new LLMInterface(): LLMServices;
                                                            Anthropic: typeof Anthropic;
                                                            APIv1: typeof APIv1;
                                                            Attachment: typeof Attachment;
                                                            DeepSeek: typeof DeepSeek;
                                                            Google: typeof Google;
                                                            Groq: typeof Groq;
                                                            LLM: typeof LLM;
                                                            ModelUsage: typeof ModelUsage;
                                                            Ollama: typeof Ollama;
                                                            OpenAI: typeof OpenAI;
                                                            parsers: parsers;
                                                            services: any[];
                                                            xAI: typeof xAI;
                                                            register(LLMClass: typeof LLM): void;
                                                            unregister(LLMClass: typeof LLM): void;
                                                            (input: Input, options?: Options): Promise<string>;
                                                            (options: Options): Promise<string>;
                                                        }
                                                        • Parameters

                                                          Returns Promise<string>

                                                        • Parameters

                                                          Returns Promise<string>

                                                        Index

                                                        Constructors

                                                        Properties

                                                        Methods

                                                        Constructors

                                                        Properties

                                                        Anthropic: typeof Anthropic
                                                        APIv1: typeof APIv1
                                                        DeepSeek: typeof DeepSeek
                                                        Google: typeof Google
                                                        Groq: typeof Groq
                                                        LLM: typeof LLM
                                                        ModelUsage: typeof ModelUsage
                                                        Ollama: typeof Ollama
                                                        OpenAI: typeof OpenAI
                                                        parsers: parsers
                                                        services: any[]
                                                        xAI: typeof xAI

                                                        Methods

                                                        • Parameters

                                                          • LLMClass: typeof LLM

                                                          Returns void

                                                        • Parameters

                                                          • LLMClass: typeof LLM

                                                          Returns void

                                                        +

                                                        Constructors

                                                        Properties

                                                        Anthropic: typeof Anthropic
                                                        APIv1: typeof APIv1
                                                        Attachment: typeof Attachment
                                                        DeepSeek: typeof DeepSeek
                                                        Google: typeof Google
                                                        Groq: typeof Groq
                                                        LLM: typeof LLM
                                                        ModelUsage: typeof ModelUsage
                                                        Ollama: typeof Ollama
                                                        OpenAI: typeof OpenAI
                                                        parsers: parsers
                                                        services: any[]
                                                        xAI: typeof xAI

                                                        Methods

                                                        • Parameters

                                                          • LLMClass: typeof LLM

                                                          Returns void

                                                        • Parameters

                                                          • LLMClass: typeof LLM

                                                          Returns void

                                                        diff --git a/public/docs/interfaces/Message.html b/public/docs/interfaces/Message.html index ca882d2..682f272 100644 --- a/public/docs/interfaces/Message.html +++ b/public/docs/interfaces/Message.html @@ -1,3 +1,3 @@ -Message | @themaximalist/llm.js
                                                        @themaximalist/llm.js
                                                          Preparing search index...

                                                          Interface Message

                                                          interface Message {
                                                              content: any;
                                                              role: MessageRole;
                                                          }
                                                          Index

                                                          Properties

                                                          content +Message | @themaximalist/llm.js
                                                          @themaximalist/llm.js
                                                            Preparing search index...

                                                            Interface Message

                                                            interface Message {
                                                                content: any;
                                                                role: MessageRole;
                                                            }
                                                            Index

                                                            Properties

                                                            Properties

                                                            content: any
                                                            +

                                                            Properties

                                                            content: any
                                                            diff --git a/public/docs/interfaces/OllamaOptions.html b/public/docs/interfaces/OllamaOptions.html index ab6f917..decf5e6 100644 --- a/public/docs/interfaces/OllamaOptions.html +++ b/public/docs/interfaces/OllamaOptions.html @@ -1,4 +1,5 @@ -OllamaOptions | @themaximalist/llm.js
                                                            @themaximalist/llm.js
                                                              Preparing search index...

                                                              Interface OllamaOptions

                                                              interface OllamaOptions {
                                                                  apiKey?: string;
                                                                  baseUrl?: string;
                                                                  extended?: boolean;
                                                                  json?: boolean;
                                                                  max_thinking_tokens?: number;
                                                                  max_tokens?: number;
                                                                  messages?: Message[];
                                                                  model?: string;
                                                                  options?: { num_predict?: number };
                                                                  parser?: ParserResponse;
                                                                  qualityFilter?: QualityFilter;
                                                                  service?: string;
                                                                  stream?: boolean;
                                                                  temperature?: number;
                                                                  think?: boolean;
                                                                  tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                              }

                                                              Hierarchy (View Summary)

                                                              Index

                                                              Properties

                                                              apiKey? +OllamaOptions | @themaximalist/llm.js
                                                              @themaximalist/llm.js
                                                                Preparing search index...

                                                                Interface OllamaOptions

                                                                interface OllamaOptions {
                                                                    apiKey?: string;
                                                                    attachments?: Attachment[];
                                                                    baseUrl?: string;
                                                                    extended?: boolean;
                                                                    json?: boolean;
                                                                    max_thinking_tokens?: number;
                                                                    max_tokens?: number;
                                                                    messages?: Message[];
                                                                    model?: string;
                                                                    options?: { num_predict?: number };
                                                                    parser?: ParserResponse;
                                                                    qualityFilter?: QualityFilter;
                                                                    service?: string;
                                                                    stream?: boolean;
                                                                    temperature?: number;
                                                                    think?: boolean;
                                                                    tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                }

                                                                Hierarchy (View Summary)

                                                                Index

                                                                Properties

                                                                Properties

                                                                apiKey?: string

                                                                API Key for the service, Usage.local services do not need an API key

                                                                -
                                                                baseUrl?: string

                                                                Base URL for the service

                                                                -
                                                                extended?: boolean

                                                                Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                -
                                                                json?: boolean

                                                                Enables JSON mode in LLM if available and parses output with parsers.json

                                                                -
                                                                max_thinking_tokens?: number

                                                                Maximum number of tokens to use when thinking is enabled

                                                                -
                                                                max_tokens?: number

                                                                Maximum number of tokens to generate

                                                                -
                                                                messages?: Message[]

                                                                Messages to send to the model

                                                                -
                                                                model?: string

                                                                Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                -
                                                                options?: { num_predict?: number }

                                                                Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                -
                                                                qualityFilter?: QualityFilter

                                                                Quality filter when dealing with model usage

                                                                -
                                                                service?: string

                                                                Service to use, defaults to Ollama

                                                                -
                                                                stream?: boolean

                                                                Enables streaming mode

                                                                -
                                                                temperature?: number

                                                                Temperature for the model

                                                                -
                                                                think?: boolean

                                                                Enables thinking mode

                                                                +
                                                                attachments?: Attachment[]

                                                                Attachments to send to the model

                                                                +
                                                                baseUrl?: string

                                                                Base URL for the service

                                                                +
                                                                extended?: boolean

                                                                Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                +
                                                                json?: boolean

                                                                Enables JSON mode in LLM if available and parses output with parsers.json

                                                                +
                                                                max_thinking_tokens?: number

                                                                Maximum number of tokens to use when thinking is enabled

                                                                +
                                                                max_tokens?: number

                                                                Maximum number of tokens to generate

                                                                +
                                                                messages?: Message[]

                                                                Messages to send to the model

                                                                +
                                                                model?: string

                                                                Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                +
                                                                options?: { num_predict?: number }

                                                                Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                +
                                                                qualityFilter?: QualityFilter

                                                                Quality filter when dealing with model usage

                                                                +
                                                                service?: string

                                                                Service to use, defaults to Ollama

                                                                +
                                                                stream?: boolean

                                                                Enables streaming mode

                                                                +
                                                                temperature?: number

                                                                Temperature for the model

                                                                +
                                                                think?: boolean

                                                                Enables thinking mode

                                                                tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                Tools available for the model to use, will enable Options.extended

                                                                -
                                                                +
                                                                diff --git a/public/docs/interfaces/OpenAIOptions.html b/public/docs/interfaces/OpenAIOptions.html index e182042..e0271fd 100644 --- a/public/docs/interfaces/OpenAIOptions.html +++ b/public/docs/interfaces/OpenAIOptions.html @@ -1,4 +1,5 @@ -OpenAIOptions | @themaximalist/llm.js
                                                                @themaximalist/llm.js
                                                                  Preparing search index...

                                                                  Interface OpenAIOptions

                                                                  interface OpenAIOptions {
                                                                      apiKey?: string;
                                                                      baseUrl?: string;
                                                                      extended?: boolean;
                                                                      input?: string | Message[];
                                                                      json?: boolean;
                                                                      max_output_tokens?: number;
                                                                      max_thinking_tokens?: number;
                                                                      max_tokens?: number;
                                                                      messages?: Message[];
                                                                      model?: string;
                                                                      parser?: ParserResponse;
                                                                      qualityFilter?: QualityFilter;
                                                                      reasoning?: {
                                                                          effort: "low" | "medium" | "high";
                                                                          summary: "auto" | "concise" | "detailed";
                                                                      };
                                                                      service?: string;
                                                                      stream?: boolean;
                                                                      temperature?: number;
                                                                      think?: boolean;
                                                                      tools?: Tool[]
                                                                      | WrappedTool[]
                                                                      | OpenAITool[];
                                                                  }

                                                                  Hierarchy (View Summary)

                                                                  Index

                                                                  Properties

                                                                  apiKey? +OpenAIOptions | @themaximalist/llm.js
                                                                  @themaximalist/llm.js
                                                                    Preparing search index...

                                                                    Interface OpenAIOptions

                                                                    interface OpenAIOptions {
                                                                        apiKey?: string;
                                                                        attachments?: Attachment[];
                                                                        baseUrl?: string;
                                                                        extended?: boolean;
                                                                        input?: string | Message[];
                                                                        json?: boolean;
                                                                        max_output_tokens?: number;
                                                                        max_thinking_tokens?: number;
                                                                        max_tokens?: number;
                                                                        messages?: Message[];
                                                                        model?: string;
                                                                        parser?: ParserResponse;
                                                                        qualityFilter?: QualityFilter;
                                                                        reasoning?: {
                                                                            effort: "low" | "medium" | "high";
                                                                            summary: "auto" | "concise" | "detailed";
                                                                        };
                                                                        service?: string;
                                                                        stream?: boolean;
                                                                        temperature?: number;
                                                                        think?: boolean;
                                                                        tools?: Tool[]
                                                                        | WrappedTool[]
                                                                        | OpenAITool[];
                                                                    }

                                                                    Hierarchy (View Summary)

                                                                    Index

                                                                    Properties

                                                                    Properties

                                                                    apiKey?: string

                                                                    API Key for the service, Usage.local services do not need an API key

                                                                    -
                                                                    baseUrl?: string

                                                                    Base URL for the service

                                                                    -
                                                                    extended?: boolean

                                                                    Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                    -
                                                                    input?: string | Message[]
                                                                    json?: boolean

                                                                    Enables JSON mode in LLM if available and parses output with parsers.json

                                                                    -
                                                                    max_output_tokens?: number
                                                                    max_thinking_tokens?: number

                                                                    Maximum number of tokens to use when thinking is enabled

                                                                    -
                                                                    max_tokens?: number

                                                                    Maximum number of tokens to generate

                                                                    -
                                                                    messages?: Message[]

                                                                    Messages to send to the model

                                                                    -
                                                                    model?: string

                                                                    Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                    -

                                                                    Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                    -
                                                                    qualityFilter?: QualityFilter

                                                                    Quality filter when dealing with model usage

                                                                    -
                                                                    reasoning?: {
                                                                        effort: "low" | "medium" | "high";
                                                                        summary: "auto" | "concise" | "detailed";
                                                                    }
                                                                    service?: string

                                                                    Service to use, defaults to Ollama

                                                                    -
                                                                    stream?: boolean

                                                                    Enables streaming mode

                                                                    -
                                                                    temperature?: number

                                                                    Temperature for the model

                                                                    -
                                                                    think?: boolean

                                                                    Enables thinking mode

                                                                    -
                                                                    tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                    Tools available for the model to use, will enable Options.extended

                                                                    -
                                                                    +
                                                                    attachments?: Attachment[]

                                                                    Attachments to send to the model

                                                                    +
                                                                    baseUrl?: string

                                                                    Base URL for the service

                                                                    +
                                                                    extended?: boolean

                                                                    Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                    +
                                                                    input?: string | Message[]
                                                                    json?: boolean

                                                                    Enables JSON mode in LLM if available and parses output with parsers.json

                                                                    +
                                                                    max_output_tokens?: number
                                                                    max_thinking_tokens?: number

                                                                    Maximum number of tokens to use when thinking is enabled

                                                                    +
                                                                    max_tokens?: number

                                                                    Maximum number of tokens to generate

                                                                    +
                                                                    messages?: Message[]

                                                                    Messages to send to the model

                                                                    +
                                                                    model?: string

                                                                    Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                    +

                                                                    Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                    +
                                                                    qualityFilter?: QualityFilter

                                                                    Quality filter when dealing with model usage

                                                                    +
                                                                    reasoning?: {
                                                                        effort: "low" | "medium" | "high";
                                                                        summary: "auto" | "concise" | "detailed";
                                                                    }
                                                                    service?: string

                                                                    Service to use, defaults to Ollama

                                                                    +
                                                                    stream?: boolean

                                                                    Enables streaming mode

                                                                    +
                                                                    temperature?: number

                                                                    Temperature for the model

                                                                    +
                                                                    think?: boolean

                                                                    Enables thinking mode

                                                                    +
                                                                    tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                    Tools available for the model to use, will enable Options.extended

                                                                    +
                                                                    diff --git a/public/docs/interfaces/Options.html b/public/docs/interfaces/Options.html index b488037..f98cc6f 100644 --- a/public/docs/interfaces/Options.html +++ b/public/docs/interfaces/Options.html @@ -1,4 +1,5 @@ -Options | @themaximalist/llm.js
                                                                    @themaximalist/llm.js
                                                                      Preparing search index...

                                                                      Interface Options

                                                                      interface Options {
                                                                          apiKey?: string;
                                                                          baseUrl?: string;
                                                                          extended?: boolean;
                                                                          json?: boolean;
                                                                          max_thinking_tokens?: number;
                                                                          max_tokens?: number;
                                                                          messages?: Message[];
                                                                          model?: string;
                                                                          parser?: ParserResponse;
                                                                          qualityFilter?: QualityFilter;
                                                                          service?: string;
                                                                          stream?: boolean;
                                                                          temperature?: number;
                                                                          think?: boolean;
                                                                          tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                      }

                                                                      Hierarchy (View Summary)

                                                                      Index

                                                                      Properties

                                                                      apiKey? +Options | @themaximalist/llm.js
                                                                      @themaximalist/llm.js
                                                                        Preparing search index...

                                                                        Interface Options

                                                                        interface Options {
                                                                            apiKey?: string;
                                                                            attachments?: Attachment[];
                                                                            baseUrl?: string;
                                                                            extended?: boolean;
                                                                            json?: boolean;
                                                                            max_thinking_tokens?: number;
                                                                            max_tokens?: number;
                                                                            messages?: Message[];
                                                                            model?: string;
                                                                            parser?: ParserResponse;
                                                                            qualityFilter?: QualityFilter;
                                                                            service?: string;
                                                                            stream?: boolean;
                                                                            temperature?: number;
                                                                            think?: boolean;
                                                                            tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                        }

                                                                        Hierarchy (View Summary)

                                                                        Index

                                                                        Properties

                                                                        Properties

                                                                        apiKey?: string

                                                                        API Key for the service, Usage.local services do not need an API key

                                                                        -
                                                                        baseUrl?: string

                                                                        Base URL for the service

                                                                        -
                                                                        extended?: boolean

                                                                        Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                        -
                                                                        json?: boolean

                                                                        Enables JSON mode in LLM if available and parses output with parsers.json

                                                                        -
                                                                        max_thinking_tokens?: number

                                                                        Maximum number of tokens to use when thinking is enabled

                                                                        -
                                                                        max_tokens?: number

                                                                        Maximum number of tokens to generate

                                                                        -
                                                                        messages?: Message[]

                                                                        Messages to send to the model

                                                                        -
                                                                        model?: string

                                                                        Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                        -

                                                                        Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                        -
                                                                        qualityFilter?: QualityFilter

                                                                        Quality filter when dealing with model usage

                                                                        -
                                                                        service?: string

                                                                        Service to use, defaults to Ollama

                                                                        -
                                                                        stream?: boolean

                                                                        Enables streaming mode

                                                                        -
                                                                        temperature?: number

                                                                        Temperature for the model

                                                                        -
                                                                        think?: boolean

                                                                        Enables thinking mode

                                                                        -
                                                                        tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                        Tools available for the model to use, will enable Options.extended

                                                                        -
                                                                        +
                                                                        attachments?: Attachment[]

                                                                        Attachments to send to the model

                                                                        +
                                                                        baseUrl?: string

                                                                        Base URL for the service

                                                                        +
                                                                        extended?: boolean

                                                                        Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                        +
                                                                        json?: boolean

                                                                        Enables JSON mode in LLM if available and parses output with parsers.json

                                                                        +
                                                                        max_thinking_tokens?: number

                                                                        Maximum number of tokens to use when thinking is enabled

                                                                        +
                                                                        max_tokens?: number

                                                                        Maximum number of tokens to generate

                                                                        +
                                                                        messages?: Message[]

                                                                        Messages to send to the model

                                                                        +
                                                                        model?: string

                                                                        Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                        +

                                                                        Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                        +
                                                                        qualityFilter?: QualityFilter

                                                                        Quality filter when dealing with model usage

                                                                        +
                                                                        service?: string

                                                                        Service to use, defaults to Ollama

                                                                        +
                                                                        stream?: boolean

                                                                        Enables streaming mode

                                                                        +
                                                                        temperature?: number

                                                                        Temperature for the model

                                                                        +
                                                                        think?: boolean

                                                                        Enables thinking mode

                                                                        +
                                                                        tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                        Tools available for the model to use, will enable Options.extended

                                                                        +
                                                                        diff --git a/public/docs/interfaces/Parsers.html b/public/docs/interfaces/Parsers.html index 67011f2..a10b26f 100644 --- a/public/docs/interfaces/Parsers.html +++ b/public/docs/interfaces/Parsers.html @@ -1 +1 @@ -Parsers | @themaximalist/llm.js
                                                                        @themaximalist/llm.js
                                                                          Preparing search index...

                                                                          Interface Parsers

                                                                          Indexable

                                                                          +Parsers | @themaximalist/llm.js
                                                                          @themaximalist/llm.js
                                                                            Preparing search index...

                                                                            Interface Parsers

                                                                            Indexable

                                                                            diff --git a/public/docs/interfaces/PartialStreamResponse.html b/public/docs/interfaces/PartialStreamResponse.html index 02ecbf7..d0e8aee 100644 --- a/public/docs/interfaces/PartialStreamResponse.html +++ b/public/docs/interfaces/PartialStreamResponse.html @@ -1,6 +1,6 @@ -PartialStreamResponse | @themaximalist/llm.js
                                                                            @themaximalist/llm.js
                                                                              Preparing search index...

                                                                              Interface PartialStreamResponse

                                                                              interface PartialStreamResponse {
                                                                                  complete: () => Promise<StreamResponse>;
                                                                                  options: Options;
                                                                                  service: string;
                                                                                  stream:
                                                                                      | AsyncGenerator<string, any, any>
                                                                                      | AsyncGenerator<Record<string, string | InputOutputTokens>, any, any>;
                                                                                  think: boolean;
                                                                              }
                                                                              Index

                                                                              Properties

                                                                              complete +PartialStreamResponse | @themaximalist/llm.js
                                                                              @themaximalist/llm.js
                                                                                Preparing search index...

                                                                                Interface PartialStreamResponse

                                                                                interface PartialStreamResponse {
                                                                                    complete: () => Promise<StreamResponse>;
                                                                                    options: Options;
                                                                                    service: string;
                                                                                    stream:
                                                                                        | AsyncGenerator<string, any, any>
                                                                                        | AsyncGenerator<Record<string, string | InputOutputTokens>, any, any>;
                                                                                    think: boolean;
                                                                                }
                                                                                Index

                                                                                Properties

                                                                                complete: () => Promise<StreamResponse>
                                                                                options: Options
                                                                                service: string
                                                                                stream:
                                                                                    | AsyncGenerator<string, any, any>
                                                                                    | AsyncGenerator<Record<string, string | InputOutputTokens>, any, any>
                                                                                think: boolean
                                                                                +

                                                                                Properties

                                                                                complete: () => Promise<StreamResponse>
                                                                                options: Options
                                                                                service: string
                                                                                stream:
                                                                                    | AsyncGenerator<string, any, any>
                                                                                    | AsyncGenerator<Record<string, string | InputOutputTokens>, any, any>
                                                                                think: boolean
                                                                                diff --git a/public/docs/interfaces/Response.html b/public/docs/interfaces/Response.html index 158eb43..42700c5 100644 --- a/public/docs/interfaces/Response.html +++ b/public/docs/interfaces/Response.html @@ -1,8 +1,8 @@ -Response | @themaximalist/llm.js
                                                                                @themaximalist/llm.js
                                                                                  Preparing search index...

                                                                                  Interface Response

                                                                                  interface Response {
                                                                                      content: string;
                                                                                      messages: Message[];
                                                                                      options: Options;
                                                                                      service: string;
                                                                                      thinking?: string;
                                                                                      tool_calls?: ToolCall[];
                                                                                      usage: Usage;
                                                                                  }

                                                                                  Hierarchy (View Summary)

                                                                                  Index

                                                                                  Properties

                                                                                  content +Response | @themaximalist/llm.js
                                                                                  @themaximalist/llm.js
                                                                                    Preparing search index...

                                                                                    Interface Response

                                                                                    interface Response {
                                                                                        content: string;
                                                                                        messages: Message[];
                                                                                        options: Options;
                                                                                        service: string;
                                                                                        thinking?: string;
                                                                                        tool_calls?: ToolCall[];
                                                                                        usage: Usage;
                                                                                    }

                                                                                    Hierarchy (View Summary)

                                                                                    Index

                                                                                    Properties

                                                                                    content: string
                                                                                    messages: Message[]
                                                                                    options: Options
                                                                                    service: string
                                                                                    thinking?: string
                                                                                    tool_calls?: ToolCall[]
                                                                                    usage: Usage
                                                                                    +

                                                                                    Properties

                                                                                    content: string
                                                                                    messages: Message[]
                                                                                    options: Options
                                                                                    service: string
                                                                                    thinking?: string
                                                                                    tool_calls?: ToolCall[]
                                                                                    usage: Usage
                                                                                    diff --git a/public/docs/interfaces/StreamResponse.html b/public/docs/interfaces/StreamResponse.html index 67a3b5c..2bd3883 100644 --- a/public/docs/interfaces/StreamResponse.html +++ b/public/docs/interfaces/StreamResponse.html @@ -1,8 +1,8 @@ -StreamResponse | @themaximalist/llm.js
                                                                                    @themaximalist/llm.js
                                                                                      Preparing search index...

                                                                                      Interface StreamResponse

                                                                                      interface StreamResponse {
                                                                                          content: string;
                                                                                          messages: Message[];
                                                                                          options: Options;
                                                                                          service: string;
                                                                                          thinking?: string;
                                                                                          tool_calls?: ToolCall[];
                                                                                          usage: Usage;
                                                                                      }

                                                                                      Hierarchy (View Summary)

                                                                                      Index

                                                                                      Properties

                                                                                      content +StreamResponse | @themaximalist/llm.js
                                                                                      @themaximalist/llm.js
                                                                                        Preparing search index...

                                                                                        Interface StreamResponse

                                                                                        interface StreamResponse {
                                                                                            content: string;
                                                                                            messages: Message[];
                                                                                            options: Options;
                                                                                            service: string;
                                                                                            thinking?: string;
                                                                                            tool_calls?: ToolCall[];
                                                                                            usage: Usage;
                                                                                        }

                                                                                        Hierarchy (View Summary)

                                                                                        Index

                                                                                        Properties

                                                                                        content: string
                                                                                        messages: Message[]
                                                                                        options: Options
                                                                                        service: string
                                                                                        thinking?: string
                                                                                        tool_calls?: ToolCall[]
                                                                                        usage: Usage
                                                                                        +

                                                                                        Properties

                                                                                        content: string
                                                                                        messages: Message[]
                                                                                        options: Options
                                                                                        service: string
                                                                                        thinking?: string
                                                                                        tool_calls?: ToolCall[]
                                                                                        usage: Usage
                                                                                        diff --git a/public/docs/interfaces/StreamingToolCall.html b/public/docs/interfaces/StreamingToolCall.html index 8884527..e7f30c1 100644 --- a/public/docs/interfaces/StreamingToolCall.html +++ b/public/docs/interfaces/StreamingToolCall.html @@ -1,4 +1,4 @@ -StreamingToolCall | @themaximalist/llm.js
                                                                                        @themaximalist/llm.js
                                                                                          Preparing search index...

                                                                                          Interface StreamingToolCall

                                                                                          interface StreamingToolCall {
                                                                                              id?: string;
                                                                                              input?: string;
                                                                                              name?: string;
                                                                                          }
                                                                                          Index

                                                                                          Properties

                                                                                          id? +StreamingToolCall | @themaximalist/llm.js
                                                                                          @themaximalist/llm.js
                                                                                            Preparing search index...

                                                                                            Interface StreamingToolCall

                                                                                            interface StreamingToolCall {
                                                                                                id?: string;
                                                                                                input?: string;
                                                                                                name?: string;
                                                                                            }
                                                                                            Index

                                                                                            Properties

                                                                                            Properties

                                                                                            id?: string
                                                                                            input?: string
                                                                                            name?: string
                                                                                            +

                                                                                            Properties

                                                                                            id?: string
                                                                                            input?: string
                                                                                            name?: string
                                                                                            diff --git a/public/docs/interfaces/Tool.html b/public/docs/interfaces/Tool.html index 7a2f084..a604540 100644 --- a/public/docs/interfaces/Tool.html +++ b/public/docs/interfaces/Tool.html @@ -1,4 +1,4 @@ -Tool | @themaximalist/llm.js
                                                                                            @themaximalist/llm.js
                                                                                              Preparing search index...

                                                                                              Interface Tool

                                                                                              interface Tool {
                                                                                                  description: string;
                                                                                                  input_schema: any;
                                                                                                  name: string;
                                                                                              }
                                                                                              Index

                                                                                              Properties

                                                                                              description +Tool | @themaximalist/llm.js
                                                                                              @themaximalist/llm.js
                                                                                                Preparing search index...

                                                                                                Interface Tool

                                                                                                interface Tool {
                                                                                                    description: string;
                                                                                                    input_schema: any;
                                                                                                    name: string;
                                                                                                }
                                                                                                Index

                                                                                                Properties

                                                                                                description: string
                                                                                                input_schema: any
                                                                                                name: string
                                                                                                +

                                                                                                Properties

                                                                                                description: string
                                                                                                input_schema: any
                                                                                                name: string
                                                                                                diff --git a/public/docs/interfaces/ToolCall.html b/public/docs/interfaces/ToolCall.html index d7781cb..e178c09 100644 --- a/public/docs/interfaces/ToolCall.html +++ b/public/docs/interfaces/ToolCall.html @@ -1,4 +1,4 @@ -ToolCall | @themaximalist/llm.js
                                                                                                @themaximalist/llm.js
                                                                                                  Preparing search index...

                                                                                                  Interface ToolCall

                                                                                                  interface ToolCall {
                                                                                                      id: string;
                                                                                                      input: any;
                                                                                                      name: string;
                                                                                                  }
                                                                                                  Index

                                                                                                  Properties

                                                                                                  id +ToolCall | @themaximalist/llm.js
                                                                                                  @themaximalist/llm.js
                                                                                                    Preparing search index...

                                                                                                    Interface ToolCall

                                                                                                    interface ToolCall {
                                                                                                        id: string;
                                                                                                        input: any;
                                                                                                        name: string;
                                                                                                    }
                                                                                                    Index

                                                                                                    Properties

                                                                                                    Properties

                                                                                                    id: string
                                                                                                    input: any
                                                                                                    name: string
                                                                                                    +

                                                                                                    Properties

                                                                                                    id: string
                                                                                                    input: any
                                                                                                    name: string
                                                                                                    diff --git a/public/docs/interfaces/Usage.html b/public/docs/interfaces/Usage.html index 0362a25..203c77d 100644 --- a/public/docs/interfaces/Usage.html +++ b/public/docs/interfaces/Usage.html @@ -1,8 +1,8 @@ -Usage | @themaximalist/llm.js
                                                                                                    @themaximalist/llm.js
                                                                                                      Preparing search index...

                                                                                                      Interface Usage

                                                                                                      interface Usage {
                                                                                                          input_cost: number;
                                                                                                          input_tokens: number;
                                                                                                          local: boolean;
                                                                                                          output_cost: number;
                                                                                                          output_tokens: number;
                                                                                                          total_cost: number;
                                                                                                          total_tokens: number;
                                                                                                      }

                                                                                                      Hierarchy (View Summary)

                                                                                                      Index

                                                                                                      Properties

                                                                                                      input_cost +Usage | @themaximalist/llm.js
                                                                                                      @themaximalist/llm.js
                                                                                                        Preparing search index...

                                                                                                        Interface Usage

                                                                                                        interface Usage {
                                                                                                            input_cost: number;
                                                                                                            input_tokens: number;
                                                                                                            local: boolean;
                                                                                                            output_cost: number;
                                                                                                            output_tokens: number;
                                                                                                            total_cost: number;
                                                                                                            total_tokens: number;
                                                                                                        }

                                                                                                        Hierarchy (View Summary)

                                                                                                        Index

                                                                                                        Properties

                                                                                                        input_cost: number
                                                                                                        input_tokens: number
                                                                                                        local: boolean
                                                                                                        output_cost: number
                                                                                                        output_tokens: number
                                                                                                        total_cost: number
                                                                                                        total_tokens: number
                                                                                                        +

                                                                                                        Properties

                                                                                                        input_cost: number
                                                                                                        input_tokens: number
                                                                                                        local: boolean
                                                                                                        output_cost: number
                                                                                                        output_tokens: number
                                                                                                        total_cost: number
                                                                                                        total_tokens: number
                                                                                                        diff --git a/public/docs/interfaces/WrappedTool.html b/public/docs/interfaces/WrappedTool.html index 69248a9..8176f1d 100644 --- a/public/docs/interfaces/WrappedTool.html +++ b/public/docs/interfaces/WrappedTool.html @@ -1,3 +1,3 @@ -WrappedTool | @themaximalist/llm.js
                                                                                                        @themaximalist/llm.js
                                                                                                          Preparing search index...

                                                                                                          Interface WrappedTool

                                                                                                          interface WrappedTool {
                                                                                                              function: { description: string; name: string; parameters: any };
                                                                                                              type: "function";
                                                                                                          }
                                                                                                          Index

                                                                                                          Properties

                                                                                                          function +WrappedTool | @themaximalist/llm.js
                                                                                                          @themaximalist/llm.js
                                                                                                            Preparing search index...

                                                                                                            Interface WrappedTool

                                                                                                            interface WrappedTool {
                                                                                                                function: { description: string; name: string; parameters: any };
                                                                                                                type: "function";
                                                                                                            }
                                                                                                            Index

                                                                                                            Properties

                                                                                                            Properties

                                                                                                            function: { description: string; name: string; parameters: any }
                                                                                                            type: "function"
                                                                                                            +

                                                                                                            Properties

                                                                                                            function: { description: string; name: string; parameters: any }
                                                                                                            type: "function"
                                                                                                            diff --git a/public/docs/interfaces/WrappedToolCall.html b/public/docs/interfaces/WrappedToolCall.html index d025a3f..6fcf0cb 100644 --- a/public/docs/interfaces/WrappedToolCall.html +++ b/public/docs/interfaces/WrappedToolCall.html @@ -1,2 +1,2 @@ -WrappedToolCall | @themaximalist/llm.js
                                                                                                            @themaximalist/llm.js
                                                                                                              Preparing search index...

                                                                                                              Interface WrappedToolCall

                                                                                                              interface WrappedToolCall {
                                                                                                                  function: { arguments: any; id: string; name: string };
                                                                                                              }
                                                                                                              Index

                                                                                                              Properties

                                                                                                              Properties

                                                                                                              function: { arguments: any; id: string; name: string }
                                                                                                              +WrappedToolCall | @themaximalist/llm.js
                                                                                                              @themaximalist/llm.js
                                                                                                                Preparing search index...

                                                                                                                Interface WrappedToolCall

                                                                                                                interface WrappedToolCall {
                                                                                                                    function: { arguments: any; id: string; name: string };
                                                                                                                }
                                                                                                                Index

                                                                                                                Properties

                                                                                                                Properties

                                                                                                                function: { arguments: any; id: string; name: string }
                                                                                                                diff --git a/public/docs/modules.html b/public/docs/modules.html index d5d27e2..27119a0 100644 --- a/public/docs/modules.html +++ b/public/docs/modules.html @@ -1 +1 @@ -@themaximalist/llm.js
                                                                                                                @themaximalist/llm.js
                                                                                                                  Preparing search index...
                                                                                                                  +@themaximalist/llm.js
                                                                                                                  @themaximalist/llm.js
                                                                                                                    Preparing search index...
                                                                                                                    diff --git a/public/docs/types/AttachmentType.html b/public/docs/types/AttachmentType.html new file mode 100644 index 0000000..c6882c2 --- /dev/null +++ b/public/docs/types/AttachmentType.html @@ -0,0 +1 @@ +AttachmentType | @themaximalist/llm.js
                                                                                                                    @themaximalist/llm.js
                                                                                                                      Preparing search index...

                                                                                                                      Type Alias AttachmentType

                                                                                                                      AttachmentType: "image" | "document"
                                                                                                                      diff --git a/public/docs/types/Input.html b/public/docs/types/Input.html index c5aa595..a2f265d 100644 --- a/public/docs/types/Input.html +++ b/public/docs/types/Input.html @@ -1 +1 @@ -Input | @themaximalist/llm.js
                                                                                                                      @themaximalist/llm.js
                                                                                                                        Preparing search index...

                                                                                                                        Type Alias Input

                                                                                                                        Input: string | Message[]
                                                                                                                        +Input | @themaximalist/llm.js
                                                                                                                        @themaximalist/llm.js
                                                                                                                          Preparing search index...

                                                                                                                          Type Alias Input

                                                                                                                          Input: string | Message[]
                                                                                                                          diff --git a/public/docs/types/LLMServices.html b/public/docs/types/LLMServices.html index a8bb78f..3fa0f0d 100644 --- a/public/docs/types/LLMServices.html +++ b/public/docs/types/LLMServices.html @@ -1 +1 @@ -LLMServices | @themaximalist/llm.js
                                                                                                                          @themaximalist/llm.js
                                                                                                                            Preparing search index...

                                                                                                                            Type Alias LLMServices

                                                                                                                            LLMServices:
                                                                                                                                | Anthropic
                                                                                                                                | Ollama
                                                                                                                                | OpenAI
                                                                                                                                | Google
                                                                                                                                | xAI
                                                                                                                                | Groq
                                                                                                                                | DeepSeek
                                                                                                                                | APIv1
                                                                                                                            +LLMServices | @themaximalist/llm.js
                                                                                                                            @themaximalist/llm.js
                                                                                                                              Preparing search index...

                                                                                                                              Type Alias LLMServices

                                                                                                                              LLMServices:
                                                                                                                                  | Anthropic
                                                                                                                                  | Ollama
                                                                                                                                  | OpenAI
                                                                                                                                  | Google
                                                                                                                                  | xAI
                                                                                                                                  | Groq
                                                                                                                                  | DeepSeek
                                                                                                                                  | APIv1
                                                                                                                              diff --git a/public/docs/types/MessageContent.html b/public/docs/types/MessageContent.html index ce37098..f0ab15d 100644 --- a/public/docs/types/MessageContent.html +++ b/public/docs/types/MessageContent.html @@ -1 +1 @@ -MessageContent | @themaximalist/llm.js
                                                                                                                              @themaximalist/llm.js
                                                                                                                                Preparing search index...

                                                                                                                                Type Alias MessageContent

                                                                                                                                MessageContent: string | Tool | any
                                                                                                                                +MessageContent | @themaximalist/llm.js
                                                                                                                                @themaximalist/llm.js
                                                                                                                                  Preparing search index...

                                                                                                                                  Type Alias MessageContent

                                                                                                                                  MessageContent: string | Tool | any
                                                                                                                                  diff --git a/public/docs/types/MessageRole.html b/public/docs/types/MessageRole.html index c3aad0b..e1893c3 100644 --- a/public/docs/types/MessageRole.html +++ b/public/docs/types/MessageRole.html @@ -1 +1 @@ -MessageRole | @themaximalist/llm.js
                                                                                                                                  @themaximalist/llm.js
                                                                                                                                    Preparing search index...

                                                                                                                                    Type Alias MessageRole

                                                                                                                                    MessageRole: "user" | "assistant" | "system" | "thinking" | "tool_call"
                                                                                                                                    +MessageRole | @themaximalist/llm.js
                                                                                                                                    @themaximalist/llm.js
                                                                                                                                      Preparing search index...

                                                                                                                                      Type Alias MessageRole

                                                                                                                                      MessageRole: "user" | "assistant" | "system" | "thinking" | "tool_call"
                                                                                                                                      diff --git a/public/docs/types/Model.html b/public/docs/types/Model.html index 63fab33..3a45565 100644 --- a/public/docs/types/Model.html +++ b/public/docs/types/Model.html @@ -1 +1 @@ -Model | @themaximalist/llm.js
                                                                                                                                      @themaximalist/llm.js
                                                                                                                                        Preparing search index...

                                                                                                                                        Type Alias Model

                                                                                                                                        Model: ModelUsageType & { created?: Date; name?: string; raw?: any }
                                                                                                                                        +Model | @themaximalist/llm.js
                                                                                                                                        @themaximalist/llm.js
                                                                                                                                          Preparing search index...

                                                                                                                                          Type Alias Model

                                                                                                                                          Model: ModelUsageType & { created?: Date; name?: string; raw?: any }
                                                                                                                                          diff --git a/public/docs/types/ParserResponse.html b/public/docs/types/ParserResponse.html index f1f4692..4a4e4c6 100644 --- a/public/docs/types/ParserResponse.html +++ b/public/docs/types/ParserResponse.html @@ -1 +1 @@ -ParserResponse | @themaximalist/llm.js
                                                                                                                                          @themaximalist/llm.js
                                                                                                                                            Preparing search index...

                                                                                                                                            Type Alias ParserResponse

                                                                                                                                            ParserResponse: (chunk: any) => string | InputOutputTokens | ToolCall[] | null

                                                                                                                                            Type declaration

                                                                                                                                            +ParserResponse | @themaximalist/llm.js
                                                                                                                                            @themaximalist/llm.js
                                                                                                                                              Preparing search index...

                                                                                                                                              Type Alias ParserResponse

                                                                                                                                              ParserResponse: (chunk: any) => string | InputOutputTokens | ToolCall[] | null

                                                                                                                                              Type declaration

                                                                                                                                              diff --git a/public/docs/types/QualityFilter.html b/public/docs/types/QualityFilter.html index d1cb5c4..b684cbf 100644 --- a/public/docs/types/QualityFilter.html +++ b/public/docs/types/QualityFilter.html @@ -1,4 +1,4 @@ -QualityFilter | @themaximalist/llm.js
                                                                                                                                              @themaximalist/llm.js
                                                                                                                                                Preparing search index...

                                                                                                                                                Type Alias QualityFilter

                                                                                                                                                type QualityFilter = {
                                                                                                                                                    allowSimilar?: boolean;
                                                                                                                                                    allowUnknown?: boolean;
                                                                                                                                                    topModels?: boolean;
                                                                                                                                                }
                                                                                                                                                Index

                                                                                                                                                Properties

                                                                                                                                                allowSimilar? +QualityFilter | @themaximalist/llm.js
                                                                                                                                                @themaximalist/llm.js
                                                                                                                                                  Preparing search index...

                                                                                                                                                  Type Alias QualityFilter

                                                                                                                                                  type QualityFilter = {
                                                                                                                                                      allowSimilar?: boolean;
                                                                                                                                                      allowUnknown?: boolean;
                                                                                                                                                      topModels?: boolean;
                                                                                                                                                  }
                                                                                                                                                  Index

                                                                                                                                                  Properties

                                                                                                                                                  allowSimilar?: boolean
                                                                                                                                                  allowUnknown?: boolean
                                                                                                                                                  topModels?: boolean
                                                                                                                                                  +

                                                                                                                                                  Properties

                                                                                                                                                  allowSimilar?: boolean
                                                                                                                                                  allowUnknown?: boolean
                                                                                                                                                  topModels?: boolean
                                                                                                                                                  diff --git a/public/docs/types/ServiceName.html b/public/docs/types/ServiceName.html index 3ea0b1f..b7e2e0b 100644 --- a/public/docs/types/ServiceName.html +++ b/public/docs/types/ServiceName.html @@ -1 +1 @@ -ServiceName | @themaximalist/llm.js
                                                                                                                                                  @themaximalist/llm.js
                                                                                                                                                    Preparing search index...

                                                                                                                                                    Type Alias ServiceName

                                                                                                                                                    ServiceName:
                                                                                                                                                        | "anthropic"
                                                                                                                                                        | "ollama"
                                                                                                                                                        | "openai"
                                                                                                                                                        | "google"
                                                                                                                                                        | "xai"
                                                                                                                                                        | "groq"
                                                                                                                                                        | "deepseek"
                                                                                                                                                        | string
                                                                                                                                                    +ServiceName | @themaximalist/llm.js
                                                                                                                                                    @themaximalist/llm.js
                                                                                                                                                      Preparing search index...

                                                                                                                                                      Type Alias ServiceName

                                                                                                                                                      ServiceName:
                                                                                                                                                          | "anthropic"
                                                                                                                                                          | "ollama"
                                                                                                                                                          | "openai"
                                                                                                                                                          | "google"
                                                                                                                                                          | "xai"
                                                                                                                                                          | "groq"
                                                                                                                                                          | "deepseek"
                                                                                                                                                          | string
                                                                                                                                                      diff --git a/public/docs/variables/default.html b/public/docs/variables/default.html index 2d0968f..6aa46a2 100644 --- a/public/docs/variables/default.html +++ b/public/docs/variables/default.html @@ -1 +1 @@ -default | @themaximalist/llm.js
                                                                                                                                                      @themaximalist/llm.js
                                                                                                                                                        Preparing search index...

                                                                                                                                                        Variable defaultConst

                                                                                                                                                        default: LLMInterface = ...
                                                                                                                                                        +default | @themaximalist/llm.js
                                                                                                                                                        @themaximalist/llm.js
                                                                                                                                                          Preparing search index...

                                                                                                                                                          Variable defaultConst

                                                                                                                                                          default: LLMInterface = ...
                                                                                                                                                          diff --git a/src/Attachment.ts b/src/Attachment.ts index 6a03924..944d0b3 100644 --- a/src/Attachment.ts +++ b/src/Attachment.ts @@ -1,17 +1,73 @@ +export type AttachmentType = "image" | "document"; + export default class Attachment { public data: string; + public type: AttachmentType; public contentType: string; - constructor(data: string, contentType: string) { + constructor(data: string, type: AttachmentType, contentType: string) { this.data = data; + this.type = type; this.contentType = contentType; } - static fromBase64(data: string, contentType: string) { - return new Attachment(data, contentType); + get isImage() { + return this.contentType.startsWith("image/"); + } + + get isDocument() { + return this.contentType === "application/pdf"; + } + + get content() { + return { type: this.type, source: this.source } + } + + get source() { + if (this.contentType === "url") { + return { type: "url", url: this.data } + } else { + return { type: "base64", media_type: this.contentType, data: this.data } + } + } + + static fromBase64(data: string, type: AttachmentType, contentType: string) { + return new Attachment(data, type, contentType); + } + + static fromJPEG(data: string) { + return new Attachment(data, "image", "image/jpeg"); + } + + static fromPNG(data: string) { + return new Attachment(data, "image", "image/png"); + } + + static fromGIF(data: string) { + return new Attachment(data, "image", "image/gif"); + } + + static fromWEBP(data: string) { + return new Attachment(data, "image", "image/webp"); + } + + static fromSVG(data: string) { + return new Attachment(data, "image", "image/svg+xml"); + } + + static fromTIFF(data: string) { + return new Attachment(data, "image", "image/tiff"); + } + + static fromPDF(data: string) { + return new Attachment(data, "document", "application/pdf"); + } + + static 
fromImageURL(url: string) { + return new Attachment(url, "image", "url"); } - static fromUrl(url: string) { - return new Attachment(url, "url"); + static fromDocumentURL(url: string) { + return new Attachment(url, "document", "url"); } } diff --git a/src/LLM.ts b/src/LLM.ts index e165271..9f9805d 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -424,17 +424,8 @@ export default class LLM { if (messageCopy.role === "thinking" || messageCopy.role === "tool_call") messageCopy.role = "assistant"; if (messageCopy.content.attachments) { - const content = []; - for (const attachment of message.content.attachments) { - if (attachment.contentType === "url") { - content.push({ type: "image", source: { type: "url", url: attachment.data } }) - } else { - content.push({ type: "image", source: { type: "base64", media_type: attachment.contentType, data: attachment.data } }); - } - } - + const content = message.content.attachments.map((attachment: Attachment) => attachment.content); content.push({ type: "text", text: message.content.text }); - messageCopy.content = content; } else if (typeof messageCopy.content !== "string") { messageCopy.content = JSON.stringify(messageCopy.content); diff --git a/src/index.ts b/src/index.ts index 88dd62e..c31a09f 100644 --- a/src/index.ts +++ b/src/index.ts @@ -45,13 +45,14 @@ export type { parsers }; */ export type { ModelUsage }; -export type { Input, Message }; +export type { Input, Message, Attachment }; export type { AnthropicOptions } from "./anthropic"; export type { OllamaOptions } from "./ollama"; export type { OpenAIOptions, OpenAITool } from "./openai"; export type { GoogleOptions, GoogleTool } from "./google"; export type { GroqOptions } from "./groq"; export type { APIv1Options } from "./APIv1"; +export type { AttachmentType } from "./Attachment"; let SERVICES = [Anthropic, Ollama, OpenAI, Google, xAI, Groq, DeepSeek]; diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 81eec42..331c87f 100644 --- 
a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -6,6 +6,11 @@ import type { Response } from "../src/LLM.types.js"; import currentService from "./currentService.js"; const taco = readFileSync("./test/taco.jpg", "base64"); +const tacoAttachment = LLM.Attachment.fromJPEG(taco); + +const dummy = readFileSync("./test/dummy.pdf", "base64"); +const dummyAttachment = LLM.Attachment.fromPDF(dummy); + // document in readme // all services @@ -22,9 +27,9 @@ describe("image", function () { if (service === "google") max_tokens = 5048; // google returns no response if max_tokens is hit! it(`${service} base64 image instance`, async function () { - const tacoAttachment = LLM.Attachment.fromBase64(taco, "image/jpeg"); expect(tacoAttachment.data).toBe(taco); expect(tacoAttachment.contentType).toBe("image/jpeg"); + expect(tacoAttachment.isImage).toBe(true); const llm = new LLM({ service, max_tokens: max_tokens }); const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as string; @@ -43,7 +48,6 @@ describe("image", function () { }); it(`${service} base64 shorthand`, async function () { - const tacoAttachment = LLM.Attachment.fromBase64(taco, "image/jpeg"); const response = await LLM("in one word what is this image?", { service, max_tokens, attachments: [tacoAttachment] }) as string; expect(response).toBeDefined(); expect(response.length).toBeGreaterThan(0); @@ -51,10 +55,6 @@ describe("image", function () { }); it(`${service} base64 stream image`, async function () { - const tacoAttachment = LLM.Attachment.fromBase64(taco, "image/jpeg"); - expect(tacoAttachment.data).toBe(taco); - expect(tacoAttachment.contentType).toBe("image/jpeg"); - const llm = new LLM({ service, max_tokens: max_tokens, stream: true }); const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as AsyncGenerator; let buffer = ""; @@ -66,16 +66,31 @@ describe("image", function () { 
expect(buffer.toLowerCase()).toContain("taco"); }); - it(`${service} image_url`, async function () { - const tacoAttachment = LLM.Attachment.fromUrl("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); - expect(tacoAttachment.data).toBe("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); - expect(tacoAttachment.contentType).toBe("url"); - + it(`${service} image url`, async function () { + const tacoAttachment = LLM.Attachment.fromImageURL("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); const llm = new LLM({ service, max_tokens: max_tokens }); const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as string; expect(response).toBeDefined(); expect(response.length).toBeGreaterThan(0); expect(response.toLowerCase()).toContain("taco"); }); + + it(`${service} pdf base64`, async function () { + expect(dummyAttachment.isDocument).toBe(true); + const llm = new LLM({ service, max_tokens: max_tokens }); + const response = await llm.chat("please return the first 50 characters of the pdf", { attachments: [dummyAttachment] }) as string; + expect(response).toBeDefined(); + expect(response.length).toBeGreaterThan(0); + expect(response.toLowerCase()).toContain("dummy"); + }); + + it(`${service} pdf url`, async function () { + const dummyAttachment = LLM.Attachment.fromDocumentURL("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/dummy.pdf"); + const llm = new LLM({ service, max_tokens: max_tokens }); + const response = await llm.chat("please return the first 50 characters of the pdf", { attachments: [dummyAttachment] }) as string; + expect(response).toBeDefined(); + expect(response.length).toBeGreaterThan(0); + expect(response.toLowerCase()).toContain("dummy"); + }); }); }); \ No newline at end of file From 6e953afc70909a942cc75de85b567731290fc920 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: 
Sun, 22 Jun 2025 14:24:18 -0500 Subject: [PATCH 05/23] cleanup --- test/attachments.test.ts | 7 ------- 1 file changed, 7 deletions(-) diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 331c87f..a3e64e4 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -2,7 +2,6 @@ import { describe, it, expect } from "vitest"; import { readFileSync } from "fs"; import LLM from "../src/index.js"; -import type { Response } from "../src/LLM.types.js"; import currentService from "./currentService.js"; const taco = readFileSync("./test/taco.jpg", "base64"); @@ -12,12 +11,6 @@ const dummy = readFileSync("./test/dummy.pdf", "base64"); const dummyAttachment = LLM.Attachment.fromPDF(dummy); -// document in readme -// all services - -// pdf from url -// pdf from buffer - describe("image", function () { LLM.services.forEach(s => { const service = s.service; From c38b17a3d2bf56440823f606c5375f6440c0334f Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 15:26:41 -0500 Subject: [PATCH 06/23] openai attachments --- src/Attachment.ts | 8 ++++++-- src/LLM.ts | 15 +++++++++++---- src/openai.ts | 16 +++++++++++++++- test/attachments.test.ts | 11 ++--------- test/currentService.ts | 2 +- 5 files changed, 35 insertions(+), 17 deletions(-) diff --git a/src/Attachment.ts b/src/Attachment.ts index 944d0b3..feb770f 100644 --- a/src/Attachment.ts +++ b/src/Attachment.ts @@ -12,11 +12,15 @@ export default class Attachment { } get isImage() { - return this.contentType.startsWith("image/"); + return this.type === "image"; } get isDocument() { - return this.contentType === "application/pdf"; + return this.type === "document"; + } + + get isURL() { + return this.contentType === "url"; } get content() { diff --git a/src/LLM.ts b/src/LLM.ts index 9f9805d..3a0d275 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -23,6 +23,7 @@ export default class LLM { static DEFAULT_MODEL: string; static isLocal: boolean = false; static isBearerAuth: boolean = false; + static 
MessageExtendedContentInputKey: string = "text"; service: ServiceName; messages: Message[]; @@ -119,10 +120,11 @@ export default class LLM { addMessage(role: MessageRole, content: MessageContent) { this.messages.push({ role, content }) } user(content: string, attachments?: Attachment[]) { + const key = (this.constructor as typeof LLM).MessageExtendedContentInputKey; if (attachments && attachments.length > 0) { - this.addMessage("user", { type: "text", text: content, attachments }); + this.addMessage("user", { type: key, text: content, attachments }); } else { - this.addMessage("user", { type: "text", text: content }); + this.addMessage("user", { type: key, text: content }); } } assistant(content: string) { this.addMessage("assistant", content) } @@ -424,8 +426,9 @@ export default class LLM { if (messageCopy.role === "thinking" || messageCopy.role === "tool_call") messageCopy.role = "assistant"; if (messageCopy.content.attachments) { - const content = message.content.attachments.map((attachment: Attachment) => attachment.content); - content.push({ type: "text", text: message.content.text }); + const key = (this.constructor as typeof LLM).MessageExtendedContentInputKey; + const content = message.content.attachments.map(this.parseAttachment); + content.push({ type: key, text: message.content.text }); messageCopy.content = content; } else if (typeof messageCopy.content !== "string") { messageCopy.content = JSON.stringify(messageCopy.content); @@ -435,6 +438,10 @@ export default class LLM { }); } + parseAttachment(attachment: Attachment): MessageContent { + return attachment.content; + } + parseOptions(options: Options): Options { if (!options) return {}; return options; diff --git a/src/openai.ts b/src/openai.ts index 57e3a27..f95e253 100644 --- a/src/openai.ts +++ b/src/openai.ts @@ -1,5 +1,6 @@ import LLM from "./LLM"; -import type { Message, Model, Options, ServiceName, ToolCall, Tool } from "./LLM.types"; +import type { Message, Model, Options, ServiceName, 
ToolCall, Tool, MessageContent } from "./LLM.types"; +import Attachment from "./Attachment"; import { keywordFilter, join } from "./utils"; /** @@ -30,6 +31,7 @@ export default class OpenAI extends LLM { static DEFAULT_BASE_URL: string = "https://api.openai.com/v1"; static DEFAULT_MODEL: string = "gpt-4o-mini"; static isBearerAuth: boolean = true; + static MessageExtendedContentInputKey: string = "input_text"; get chatUrl() { return join(this.baseUrl, "responses") } get modelsUrl() { return join(this.baseUrl, "models") } @@ -155,6 +157,18 @@ export default class OpenAI extends LLM { } as Model; } + parseAttachment(attachment: Attachment): MessageContent { + const data = (attachment.isURL) ? attachment.data : `data:${attachment.contentType};base64,${attachment.data}`; + + if (attachment.isImage) { + return { type: "input_image", image_url: data } + } else if (attachment.isDocument) { + return { type: "input_file", filename: crypto.randomUUID(), file_data: data } + } + + throw new Error("Unsupported attachment type"); + } + filterQualityModel(model: Model): boolean { const keywords = ["audio", "image", "davinci", "babbage", "dall-e", "tts", "whisper", "embedding", "vision", "moderation", "realtime", "computer-use", "transcribe", "instruct", "codex"]; diff --git a/test/attachments.test.ts b/test/attachments.test.ts index a3e64e4..2c1aa1e 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -61,6 +61,8 @@ describe("image", function () { it(`${service} image url`, async function () { const tacoAttachment = LLM.Attachment.fromImageURL("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); + expect(tacoAttachment.isImage).toBe(true); + expect(tacoAttachment.isURL).toBe(true); const llm = new LLM({ service, max_tokens: max_tokens }); const response = await llm.chat("in one word what is this image?", { attachments: [tacoAttachment] }) as string; expect(response).toBeDefined(); @@ -76,14 +78,5 @@ describe("image", function 
() { expect(response.length).toBeGreaterThan(0); expect(response.toLowerCase()).toContain("dummy"); }); - - it(`${service} pdf url`, async function () { - const dummyAttachment = LLM.Attachment.fromDocumentURL("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/dummy.pdf"); - const llm = new LLM({ service, max_tokens: max_tokens }); - const response = await llm.chat("please return the first 50 characters of the pdf", { attachments: [dummyAttachment] }) as string; - expect(response).toBeDefined(); - expect(response.length).toBeGreaterThan(0); - expect(response.toLowerCase()).toContain("dummy"); - }); }); }); \ No newline at end of file diff --git a/test/currentService.ts b/test/currentService.ts index a70c2ab..83b3f47 100644 --- a/test/currentService.ts +++ b/test/currentService.ts @@ -1,3 +1,3 @@ // helpful for testing a single service -export default "anthropic"; +export default "openai"; // export default undefined; \ No newline at end of file From 21c6440a5e7d36f8ee56fb3802082f8639cf86d4 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 15:35:48 -0500 Subject: [PATCH 07/23] xai images --- src/xai.ts | 13 +++++++++++++ test/attachments.test.ts | 15 ++++++++++++++- test/currentService.ts | 2 +- 3 files changed, 28 insertions(+), 2 deletions(-) diff --git a/src/xai.ts b/src/xai.ts index 7ce9e21..c9727c9 100644 --- a/src/xai.ts +++ b/src/xai.ts @@ -1,5 +1,6 @@ import type { ServiceName, Options } from "./LLM.types"; import APIv1 from "./APIv1"; +import Attachment from "./Attachment"; /** * @category LLMs @@ -8,4 +9,16 @@ export default class xAI extends APIv1 { static readonly service: ServiceName = "xai"; static DEFAULT_BASE_URL: string = "https://api.x.ai/v1/"; static DEFAULT_MODEL: string = "grok-3"; + + parseAttachment(attachment: Attachment) { + if (attachment.isImage) { + if (attachment.isURL) { + return { type: "image_url", image_url: { url: attachment.data, detail: "high" } } + } else { + return { type: "image_url", 
image_url: { url: `data:${attachment.contentType};base64,${attachment.data}`, detail: "high" } } + } + } + + throw new Error("Unsupported attachment type"); + } } diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 2c1aa1e..67a0b1b 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -1,4 +1,4 @@ -import { describe, it, expect } from "vitest"; +import { describe, it, expect, beforeEach, afterEach } from "vitest"; import { readFileSync } from "fs"; import LLM from "../src/index.js"; @@ -11,7 +11,18 @@ const dummy = readFileSync("./test/dummy.pdf", "base64"); const dummyAttachment = LLM.Attachment.fromPDF(dummy); +const xAI_DEFAULT = LLM.xAI.DEFAULT_MODEL; + +beforeEach(function () { + LLM.xAI.DEFAULT_MODEL = "grok-2-vision"; +}); + +afterEach(function () { + LLM.xAI.DEFAULT_MODEL = xAI_DEFAULT; +}); + describe("image", function () { + LLM.services.forEach(s => { const service = s.service; @@ -71,6 +82,8 @@ describe("image", function () { }); it(`${service} pdf base64`, async function () { + if (service === "xai") return; + expect(dummyAttachment.isDocument).toBe(true); const llm = new LLM({ service, max_tokens: max_tokens }); const response = await llm.chat("please return the first 50 characters of the pdf", { attachments: [dummyAttachment] }) as string; diff --git a/test/currentService.ts b/test/currentService.ts index 83b3f47..7926ccf 100644 --- a/test/currentService.ts +++ b/test/currentService.ts @@ -1,3 +1,3 @@ // helpful for testing a single service -export default "openai"; +export default "xai"; // export default undefined; \ No newline at end of file From 3238035bd1902b37006de317bdd5517a04b5b429 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 17:03:50 -0500 Subject: [PATCH 08/23] google --- src/LLM.ts | 27 +++++++++++++--------- src/google.ts | 50 +++++++++++++++++++++++++++++++++++++++- test/attachments.test.ts | 2 +- test/currentService.ts | 2 +- 4 files changed, 67 insertions(+), 14 deletions(-) 
diff --git a/src/LLM.ts b/src/LLM.ts index 3a0d275..2c80f09 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -152,6 +152,8 @@ export default class LLM { this.resetCache(); + console.log(JSON.stringify(opts, null, 2)); + if (opts.tools && opts.tools.length > 0) this.extended = true; log.debug(`LLM ${this.service} send`); @@ -421,23 +423,26 @@ export default class LLM { parseModel(model: any): Model { throw new Error("parseModel not implemented") } parseMessages(messages: Message[]): Message[] { return messages.map(message => { - const messageCopy = JSON.parse(JSON.stringify(message)); - - if (messageCopy.role === "thinking" || messageCopy.role === "tool_call") messageCopy.role = "assistant"; + const copy = JSON.parse(JSON.stringify(message)); + if (copy.role === "thinking" || copy.role === "tool_call") copy.role = "assistant"; - if (messageCopy.content.attachments) { - const key = (this.constructor as typeof LLM).MessageExtendedContentInputKey; - const content = message.content.attachments.map(this.parseAttachment); - content.push({ type: key, text: message.content.text }); - messageCopy.content = content; - } else if (typeof messageCopy.content !== "string") { - messageCopy.content = JSON.stringify(messageCopy.content); + if (message.content.attachments) { + copy.content = this.parseAttachmentsContent(message.content); + } else if (typeof copy.content !== "string") { + copy.content = JSON.stringify(copy.content); } - return messageCopy; + return copy; }); } + parseAttachmentsContent(content: MessageContent): MessageContent[] { + const key = (this.constructor as typeof LLM).MessageExtendedContentInputKey; + const parts = content.attachments.map(this.parseAttachment); + parts.push({ type: key, text: content.text }); + return parts; + } + parseAttachment(attachment: Attachment): MessageContent { return attachment.content; } diff --git a/src/google.ts b/src/google.ts index 10b913e..9fa9a81 100644 --- a/src/google.ts +++ b/src/google.ts @@ -1,5 +1,6 @@ import LLM from 
"./LLM"; -import type { ServiceName, Options, Model, ToolCall, Tool } from "./LLM.types"; +import Attachment from "./Attachment"; +import type { ServiceName, Options, Model, ToolCall, Tool, MessageContent, Message } from "./LLM.types"; import { filterMessageRole, filterNotMessageRole, keywordFilter, uuid, join } from "./utils"; /** @@ -95,6 +96,21 @@ export default class Google extends LLM { return options; } + parseMessages(messages: Message[]): Message[] { + return messages.map(message => { + const copy = JSON.parse(JSON.stringify(message)); + if (copy.role === "thinking" || copy.role === "tool_call") copy.role = "assistant"; + + if (message.content.attachments) { + copy.content = this.parseAttachmentsContent(message.content); + } else if (typeof copy.contents !== "string") { + copy.content = JSON.stringify(copy.contents); + } + + return copy; + }); + } + get llmHeaders() { // Google needs endpoint to not send preflight...so we remove "x-" header and we're passing through query params const headers = super.llmHeaders; @@ -144,8 +160,40 @@ export default class Google extends LLM { return ""; } + parseAttachment(attachment: Attachment): MessageContent { + if (attachment.isImage) { + if (!attachment.isURL) { + return { "inline_data": { "mime_type": attachment.contentType, "data": `'${attachment.data}'` } }; + } + } + + throw new Error("Unsupported attachment type"); + } + + parseAttachmentsContent(content: MessageContent): MessageContent[] { + console.log("CONTENT", content); + const parts = content.attachments.map(this.parseAttachment); + // const parts = content.attachments.map(this.parseAttachment); + // parts.push({ text: content.text }); + // return parts; + + return parts; + } + + + filterQualityModel(model: Model): boolean { const keywords = ["embedding", "vision", "learnlm", "image-generation", "gemma-3", "gemma-3n", "gemini-1.5", "embedding"]; return keywordFilter(model.model, keywords); } + + // if (system.length > 0) { options.system_instruction = { 
parts: system.map(message => ({ text: message.content })) } } + // if (nonSystem.length > 0) { options.contents = nonSystem.map(message => ({ role: message.role, parts: [{ text: message.content }] })) } + + static toGoogleMessage(message: Message): GoogleMessage { + return { + role: message.role, + parts: [{ text: message.content }], + }; + } } diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 67a0b1b..9bd0c44 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -30,7 +30,7 @@ describe("image", function () { if (currentService && service !== currentService) return; if (service === "google") max_tokens = 5048; // google returns no response if max_tokens is hit! - it(`${service} base64 image instance`, async function () { + it.only(`${service} base64 image instance`, async function () { expect(tacoAttachment.data).toBe(taco); expect(tacoAttachment.contentType).toBe("image/jpeg"); expect(tacoAttachment.isImage).toBe(true); diff --git a/test/currentService.ts b/test/currentService.ts index 7926ccf..caae9a9 100644 --- a/test/currentService.ts +++ b/test/currentService.ts @@ -1,3 +1,3 @@ // helpful for testing a single service -export default "xai"; +export default "google"; // export default undefined; \ No newline at end of file From 7b9c245b09c8a95ce00a95f43be15913a7e4297e Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 17:28:45 -0500 Subject: [PATCH 09/23] google image started --- src/LLM.ts | 4 +- src/google.ts | 87 +++++++++++++++++++++++++++------------- test/attachments.test.ts | 3 ++ 3 files changed, 64 insertions(+), 30 deletions(-) diff --git a/src/LLM.ts b/src/LLM.ts index 2c80f09..6296be0 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -152,7 +152,7 @@ export default class LLM { this.resetCache(); - console.log(JSON.stringify(opts, null, 2)); + console.log("LLM OPTIONS", JSON.stringify(opts, null, 2)); if (opts.tools && opts.tools.length > 0) this.extended = true; @@ -429,7 +429,7 @@ export default class 
LLM { if (message.content.attachments) { copy.content = this.parseAttachmentsContent(message.content); } else if (typeof copy.content !== "string") { - copy.content = JSON.stringify(copy.content); + // copy.content = JSON.stringify(copy.content); } return copy; diff --git a/src/google.ts b/src/google.ts index 9fa9a81..aaf44b9 100644 --- a/src/google.ts +++ b/src/google.ts @@ -7,8 +7,8 @@ import { filterMessageRole, filterNotMessageRole, keywordFilter, uuid, join } fr * @category Message */ export interface GoogleMessage { - role: "user" | "model" | "assistant"; - content: string; + role: "user" | "model"; + parts: Array<{ text?: string } | { inline_data?: { mime_type: string; data: string } }>; } /** @@ -27,9 +27,7 @@ export interface GoogleOptions extends Options { system_instruction?: { parts: { text: string }[]; } - contents?: { - parts: { text: string }[]; - }[]; + contents?: GoogleMessage[]; generationConfig?: { temperature?: number; maxOutputTokens?: number; @@ -57,7 +55,7 @@ export default class Google extends LLM { getModelsUrl() { return `${this.modelsUrl}?key=${this.apiKey}` } parseOptions(options: GoogleOptions): GoogleOptions { - const messages = JSON.parse(JSON.stringify(options.messages || [])).map((m: GoogleMessage) => { + const messages = JSON.parse(JSON.stringify(options.messages || [])).map((m: any) => { if (m.role === "assistant") m.role = "model"; return m; }); @@ -67,7 +65,7 @@ export default class Google extends LLM { delete options.messages; if (system.length > 0) { options.system_instruction = { parts: system.map(message => ({ text: message.content })) } } - if (nonSystem.length > 0) { options.contents = nonSystem.map(message => ({ role: message.role, parts: [{ text: message.content }] })) } + if (nonSystem.length > 0) { options.contents = nonSystem.map(Google.toGoogleMessage) } if (!options.generationConfig) options.generationConfig = {}; if (typeof options.temperature === "number") options.generationConfig.temperature = 
options.temperature; @@ -101,10 +99,9 @@ export default class Google extends LLM { const copy = JSON.parse(JSON.stringify(message)); if (copy.role === "thinking" || copy.role === "tool_call") copy.role = "assistant"; - if (message.content.attachments) { - copy.content = this.parseAttachmentsContent(message.content); - } else if (typeof copy.contents !== "string") { - copy.content = JSON.stringify(copy.contents); + // Don't transform attachments here - toGoogleMessage handles the proper format + if (typeof copy.content !== "string" && !(copy.content && copy.content.attachments)) { + copy.content = JSON.stringify(copy.content); } return copy; @@ -163,37 +160,71 @@ export default class Google extends LLM { parseAttachment(attachment: Attachment): MessageContent { if (attachment.isImage) { if (!attachment.isURL) { - return { "inline_data": { "mime_type": attachment.contentType, "data": `'${attachment.data}'` } }; + return { "inline_data": { "mime_type": attachment.contentType, "data": attachment.data } }; } } throw new Error("Unsupported attachment type"); } - parseAttachmentsContent(content: MessageContent): MessageContent[] { - console.log("CONTENT", content); - const parts = content.attachments.map(this.parseAttachment); - // const parts = content.attachments.map(this.parseAttachment); - // parts.push({ text: content.text }); - // return parts; - + parseAttachmentsContent(content: MessageContent): any[] { + const parts = content.attachments?.map(this.parseAttachment.bind(this)) || []; + if (content.text) { + parts.push({ text: content.text }); + } return parts; } - - filterQualityModel(model: Model): boolean { const keywords = ["embedding", "vision", "learnlm", "image-generation", "gemma-3", "gemma-3n", "gemini-1.5", "embedding"]; return keywordFilter(model.model, keywords); } - // if (system.length > 0) { options.system_instruction = { parts: system.map(message => ({ text: message.content })) } } - // if (nonSystem.length > 0) { options.contents = 
nonSystem.map(message => ({ role: message.role, parts: [{ text: message.content }] })) } - static toGoogleMessage(message: Message): GoogleMessage { - return { - role: message.role, - parts: [{ text: message.content }], - }; + if (message.content && typeof message.content === 'object' && message.content.attachments) { + const parts: any[] = []; + + console.log("Processing attachments:", message.content.attachments.length); + + // Add attachments first + for (const attachment of message.content.attachments) { + console.log("Attachment:", { + type: attachment.type, + contentType: attachment.contentType, + isURL: attachment.contentType === "url" + }); + + if (attachment.type === "image" && attachment.contentType !== "url") { + console.log("Adding image attachment to parts"); + parts.push({ + inline_data: { + mime_type: attachment.contentType, + data: attachment.data + } + }); + } else { + console.log("Attachment skipped - type:", attachment.type, "contentType:", attachment.contentType); + } + } + + // Add text if present + if (message.content.text) { + parts.push({ text: message.content.text }); + } + + console.log("Final parts array:", JSON.stringify(parts, null, 2)); + + return { + role: message.role === "assistant" ? "model" : message.role as "user" | "model", + parts + }; + } else { + // Handle text-only messages + const content = typeof message.content === 'string' ? message.content : JSON.stringify(message.content); + return { + role: message.role === "assistant" ? 
"model" : message.role as "user" | "model", + parts: [{ text: content }] + }; + } } } diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 9bd0c44..f531ed5 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -12,13 +12,16 @@ const dummyAttachment = LLM.Attachment.fromPDF(dummy); const xAI_DEFAULT = LLM.xAI.DEFAULT_MODEL; +const google_DEFAULT = LLM.Google.DEFAULT_MODEL; beforeEach(function () { LLM.xAI.DEFAULT_MODEL = "grok-2-vision"; + LLM.Google.DEFAULT_MODEL = "gemini-2.5-pro"; }); afterEach(function () { LLM.xAI.DEFAULT_MODEL = xAI_DEFAULT; + LLM.Google.DEFAULT_MODEL = google_DEFAULT; }); describe("image", function () { From e0b03696ace2cfa6a7faf59c82e8f106144acf91 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 17:38:14 -0500 Subject: [PATCH 10/23] attachment for google --- src/LLM.ts | 8 ++++---- src/google.ts | 24 ++++++---------------- src/utils.ts | 43 ++++++++++++++++++++++++++++++++++++++++ test/attachments.test.ts | 2 ++ 4 files changed, 55 insertions(+), 22 deletions(-) diff --git a/src/LLM.ts b/src/LLM.ts index 6296be0..e2af9f6 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -6,7 +6,7 @@ import Attachment from "./Attachment"; import type { ModelUsageType } from "./ModelUsage"; import config from "./config"; import * as parsers from "./parsers"; -import { parseStream, handleErrorResponse, isBrowser, isNode, join } from "./utils"; +import { parseStream, handleErrorResponse, isBrowser, isNode, join, deepClone } from "./utils"; import type { ServiceName, Options, InputOutputTokens, Usage, Response, PartialStreamResponse, StreamResponse, QualityFilter, Message, Parsers, Input, Model, MessageRole, ParserResponse, Tool, MessageContent, ToolCall, StreamingToolCall } from "./LLM.types"; @@ -423,13 +423,13 @@ export default class LLM { parseModel(model: any): Model { throw new Error("parseModel not implemented") } parseMessages(messages: Message[]): Message[] { return messages.map(message => { - const copy = 
JSON.parse(JSON.stringify(message)); + const copy = deepClone(message); if (copy.role === "thinking" || copy.role === "tool_call") copy.role = "assistant"; - if (message.content.attachments) { + if (message.content && message.content.attachments) { copy.content = this.parseAttachmentsContent(message.content); } else if (typeof copy.content !== "string") { - // copy.content = JSON.stringify(copy.content); + copy.content = JSON.stringify(copy.content); } return copy; diff --git a/src/google.ts b/src/google.ts index aaf44b9..3dadc31 100644 --- a/src/google.ts +++ b/src/google.ts @@ -1,7 +1,7 @@ import LLM from "./LLM"; import Attachment from "./Attachment"; import type { ServiceName, Options, Model, ToolCall, Tool, MessageContent, Message } from "./LLM.types"; -import { filterMessageRole, filterNotMessageRole, keywordFilter, uuid, join } from "./utils"; +import { filterMessageRole, filterNotMessageRole, keywordFilter, uuid, join, deepClone } from "./utils"; /** * @category Message @@ -55,7 +55,7 @@ export default class Google extends LLM { getModelsUrl() { return `${this.modelsUrl}?key=${this.apiKey}` } parseOptions(options: GoogleOptions): GoogleOptions { - const messages = JSON.parse(JSON.stringify(options.messages || [])).map((m: any) => { + const messages = deepClone(options.messages || []).map((m: any) => { if (m.role === "assistant") m.role = "model"; return m; }); @@ -92,14 +92,14 @@ export default class Google extends LLM { delete options.stream; return options; - } + } parseMessages(messages: Message[]): Message[] { return messages.map(message => { - const copy = JSON.parse(JSON.stringify(message)); + const copy = deepClone(message); if (copy.role === "thinking" || copy.role === "tool_call") copy.role = "assistant"; - // Don't transform attachments here - toGoogleMessage handles the proper format + // Don't transform attachments here - toGoogleMessage handles them properly if (typeof copy.content !== "string" && !(copy.content && copy.content.attachments)) { 
copy.content = JSON.stringify(copy.content); } @@ -182,28 +182,18 @@ export default class Google extends LLM { static toGoogleMessage(message: Message): GoogleMessage { if (message.content && typeof message.content === 'object' && message.content.attachments) { + // Handle messages with attachments const parts: any[] = []; - console.log("Processing attachments:", message.content.attachments.length); - // Add attachments first for (const attachment of message.content.attachments) { - console.log("Attachment:", { - type: attachment.type, - contentType: attachment.contentType, - isURL: attachment.contentType === "url" - }); - if (attachment.type === "image" && attachment.contentType !== "url") { - console.log("Adding image attachment to parts"); parts.push({ inline_data: { mime_type: attachment.contentType, data: attachment.data } }); - } else { - console.log("Attachment skipped - type:", attachment.type, "contentType:", attachment.contentType); } } @@ -212,8 +202,6 @@ export default class Google extends LLM { parts.push({ text: message.content.text }); } - console.log("Final parts array:", JSON.stringify(parts, null, 2)); - return { role: message.role === "assistant" ? "model" : message.role as "user" | "model", parts diff --git a/src/utils.ts b/src/utils.ts index 96d16d0..968d5aa 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -149,6 +149,49 @@ export function getenv(key: string) { return environment[key] ?? 
environment[`VITE_${key}`]; } +/** + * Deep clone an object while preserving class instances and their methods + * Unlike JSON.parse(JSON.stringify()), this preserves prototype chains + */ +export function deepClone(obj: T): T { + if (obj === null || typeof obj !== "object") { + return obj; + } + + if (obj instanceof Date) { + return new Date(obj.getTime()) as T; + } + + if (obj instanceof Array) { + return obj.map((item) => deepClone(item)) as T; + } + + if (obj instanceof RegExp) { + return new RegExp(obj) as T; + } + + // For class instances, preserve the prototype + if (obj.constructor !== Object) { + const cloned = Object.create(Object.getPrototypeOf(obj)); + for (const key in obj) { + if (obj.hasOwnProperty(key)) { + cloned[key] = deepClone((obj as any)[key]); + } + } + return cloned; + } + + // For plain objects + const cloned = {} as T; + for (const key in obj) { + if (obj.hasOwnProperty(key)) { + (cloned as any)[key] = deepClone((obj as any)[key]); + } + } + + return cloned; +} + export function join(...pathSegments: string[]): string { if (pathSegments.length === 0) return "."; diff --git a/test/attachments.test.ts b/test/attachments.test.ts index f531ed5..032e312 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -52,6 +52,8 @@ describe("image", function () { expect(response2.length).toBeGreaterThan(0); expect(response2.toLowerCase()).toContain("yellow"); expect(llm.messages.length).toBe(4); + expect(llm.messages[2].content).toBe("what is the color of the shell?"); + expect(llm.messages[3].content).toBe(response2); }); it(`${service} base64 shorthand`, async function () { From a243dfe0f7a575127e2a6582e3e241ea2d8d684f Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 17:53:41 -0500 Subject: [PATCH 11/23] google broken --- test/chat.test.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/chat.test.ts b/test/chat.test.ts index 8df374c..9f6d7d1 100644 --- a/test/chat.test.ts +++ 
b/test/chat.test.ts @@ -3,6 +3,9 @@ import LLM from "../src/index.js"; import type { Response } from "../src/LLM.types"; import currentService from "./currentService.js"; +// TODO: Google bug...saving as JSON +// TODO: Converting back and forth in between Google Message format...need to do this before saving to .assistant() + describe("chat", function () { LLM.services.forEach(s => { const service = s.service; @@ -18,7 +21,7 @@ describe("chat", function () { expect(response.toLowerCase()).toContain("blue"); }); - it(`${service} instance`, async function () { + it.only(`${service} instance`, async function () { const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service }); const response = await llm.send(); expect(response).toBeDefined(); @@ -27,6 +30,7 @@ describe("chat", function () { expect(llm.messages[0].content).toBe("in one word the color of the sky is usually"); expect(llm.messages[1].role).toBe("assistant"); expect(llm.messages[1].content.toLowerCase()).toContain("blue"); + // expect(llm.messages[1].content).toBe(response); }); it(`${service} instance chat`, async function () { From 0edda0fc5b54d8376a7937fa8752a01de075a9df Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 17:57:39 -0500 Subject: [PATCH 12/23] parsing google images --- src/google.ts | 7 +++++++ test/messages.test.ts | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 test/messages.test.ts diff --git a/src/google.ts b/src/google.ts index 3dadc31..a5b3dfb 100644 --- a/src/google.ts +++ b/src/google.ts @@ -215,4 +215,11 @@ export default class Google extends LLM { }; } } + + static fromGoogleMessage(googleMessage: GoogleMessage): Message { + const parts = googleMessage.parts; + if (parts.length === 1 && "text" in parts[0]) return { role: googleMessage.role, content: parts[0].text } as Message; + if (parts.length === 2 && "inline_data" in parts[0] && "text" in parts[1]) return { role: 
googleMessage.role, content: { text: parts[1].text, attachments: [{ type: "image", contentType: parts[0].inline_data.mime_type, data: parts[0].inline_data.data }] } } as Message; + throw new Error("Unsupported message type"); + } } diff --git a/test/messages.test.ts b/test/messages.test.ts new file mode 100644 index 0000000..e9ece9f --- /dev/null +++ b/test/messages.test.ts @@ -0,0 +1,32 @@ +import { describe, it, expect } from "vitest"; +import type { Message } from "../src/LLM.types.js"; +import type { GoogleMessage } from "../src/google.js"; +import LLM from "../src/index.js"; + +describe("messages", function () { + describe("google", function () { + it("text input", function () { + const message = { role: "user", content: "Hello World" } as Message; + const googleMessage = LLM.Google.toGoogleMessage(message); + expect(googleMessage).toEqual({ role: "user", parts: [{ text: "Hello World" }] }); + }); + + it("text output", function () { + const googleMessage = { role: "user", parts: [{ text: "Hello World" }] } as GoogleMessage; + const message = LLM.Google.fromGoogleMessage(googleMessage); + expect(message).toEqual({ role: "user", content: "Hello World" }); + }); + + it("image input", function () { + const message = { role: "user", content: { text: "Hello World", attachments: [{ type: "image", contentType: "image/jpeg", data: "base64" }] } } as Message; + const googleMessage = LLM.Google.toGoogleMessage(message); + expect(googleMessage).toEqual({ role: "user", parts: [{ inline_data: { mime_type: "image/jpeg", data: "base64" } }, { text: "Hello World" }] }); + }); + + it("image output", function () { + const googleMessage = { role: "user", parts: [{ inline_data: { mime_type: "image/jpeg", data: "base64" } }, { text: "Hello World" }] } as GoogleMessage; + const message = LLM.Google.fromGoogleMessage(googleMessage); + expect(message).toEqual({ role: "user", content: { text: "Hello World", attachments: [{ type: "image", contentType: "image/jpeg", data: "base64" }] } 
}); + }); + }); +}); \ No newline at end of file From 76cf84e01e1c308954c74af31651e7e48c441888 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 18:20:10 -0500 Subject: [PATCH 13/23] google finally working --- src/LLM.ts | 10 ++++--- src/google.ts | 68 ++++++++++++++++++++++++++++------------------- test/chat.test.ts | 3 ++- 3 files changed, 49 insertions(+), 32 deletions(-) diff --git a/src/LLM.ts b/src/LLM.ts index e2af9f6..a844837 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -120,11 +120,11 @@ export default class LLM { addMessage(role: MessageRole, content: MessageContent) { this.messages.push({ role, content }) } user(content: string, attachments?: Attachment[]) { - const key = (this.constructor as typeof LLM).MessageExtendedContentInputKey; if (attachments && attachments.length > 0) { + const key = (this.constructor as typeof LLM).MessageExtendedContentInputKey; this.addMessage("user", { type: key, text: content, attachments }); } else { - this.addMessage("user", { type: key, text: content }); + this.addMessage("user", content); } } assistant(content: string) { this.addMessage("assistant", content) } @@ -152,12 +152,12 @@ export default class LLM { this.resetCache(); - console.log("LLM OPTIONS", JSON.stringify(opts, null, 2)); - if (opts.tools && opts.tools.length > 0) this.extended = true; log.debug(`LLM ${this.service} send`); + console.log("LLM OPTIONS", JSON.stringify(opts, null, 2)); + this.abortController = new AbortController(); const response = await fetch(this.getChatUrl(opts), { @@ -428,6 +428,8 @@ export default class LLM { if (message.content && message.content.attachments) { copy.content = this.parseAttachmentsContent(message.content); + } else if (message.content && message.content.text) { + copy.content = message.content.text; } else if (typeof copy.content !== "string") { copy.content = JSON.stringify(copy.content); } diff --git a/src/google.ts b/src/google.ts index a5b3dfb..7cb0df8 100644 --- a/src/google.ts +++ b/src/google.ts 
@@ -1,6 +1,6 @@ import LLM from "./LLM"; import Attachment from "./Attachment"; -import type { ServiceName, Options, Model, ToolCall, Tool, MessageContent, Message } from "./LLM.types"; +import type { ServiceName, Options, Model, ToolCall, Tool, MessageContent, Message, MessageRole } from "./LLM.types"; import { filterMessageRole, filterNotMessageRole, keywordFilter, uuid, join, deepClone } from "./utils"; /** @@ -55,44 +55,44 @@ export default class Google extends LLM { getModelsUrl() { return `${this.modelsUrl}?key=${this.apiKey}` } parseOptions(options: GoogleOptions): GoogleOptions { - const messages = deepClone(options.messages || []).map((m: any) => { - if (m.role === "assistant") m.role = "model"; - return m; - }); + const opts = deepClone(options); + + const messages = opts.messages || []; + const system = filterMessageRole(messages, "system"); const nonSystem = filterNotMessageRole(messages, "system"); - delete options.messages; + delete opts.messages; - if (system.length > 0) { options.system_instruction = { parts: system.map(message => ({ text: message.content })) } } - if (nonSystem.length > 0) { options.contents = nonSystem.map(Google.toGoogleMessage) } + if (system.length > 0) { opts.system_instruction = { parts: system.map(message => ({ text: message.content })) } } + if (nonSystem.length > 0) { opts.contents = nonSystem.map(Google.toGoogleMessage) } - if (!options.generationConfig) options.generationConfig = {}; - if (typeof options.temperature === "number") options.generationConfig.temperature = options.temperature; - if (typeof options.max_tokens === "number") options.generationConfig.maxOutputTokens = options.max_tokens; - if (!options.generationConfig.maxOutputTokens) options.generationConfig.maxOutputTokens = this.max_tokens; + if (!opts.generationConfig) opts.generationConfig = {}; + if (typeof opts.temperature === "number") opts.generationConfig.temperature = opts.temperature; + if (typeof opts.max_tokens === "number") 
opts.generationConfig.maxOutputTokens = opts.max_tokens; + if (!opts.generationConfig.maxOutputTokens) opts.generationConfig.maxOutputTokens = this.max_tokens; - if (options.tools) { - options.tools = [ { functionDeclarations: options.tools.map(tool => ({ + if (opts.tools) { + opts.tools = [ { functionDeclarations: opts.tools.map(tool => ({ name: (tool as Tool).name, description: (tool as Tool).description, parameters: (tool as Tool).input_schema, })) } ] as any; } - if (options.think) { - if (!options.generationConfig) options.generationConfig = {}; - options.generationConfig.thinkingConfig = { includeThoughts: true }; - delete options.think; + if (opts.think) { + if (!opts.generationConfig) opts.generationConfig = {}; + opts.generationConfig.thinkingConfig = { includeThoughts: true }; + delete opts.think; } - delete options.think; - delete options.max_tokens; - delete options.temperature; - delete options.stream; + delete opts.think; + delete opts.max_tokens; + delete opts.temperature; + delete opts.stream; - return options; - } + return opts; + } parseMessages(messages: Message[]): Message[] { return messages.map(message => { @@ -218,8 +218,22 @@ export default class Google extends LLM { static fromGoogleMessage(googleMessage: GoogleMessage): Message { const parts = googleMessage.parts; - if (parts.length === 1 && "text" in parts[0]) return { role: googleMessage.role, content: parts[0].text } as Message; - if (parts.length === 2 && "inline_data" in parts[0] && "text" in parts[1]) return { role: googleMessage.role, content: { text: parts[1].text, attachments: [{ type: "image", contentType: parts[0].inline_data.mime_type, data: parts[0].inline_data.data }] } } as Message; + if (parts.length === 1 && "text" in parts[0] && parts[0].text) { + return { role: googleMessage.role, content: parts[0].text } as Message; + } + if (parts.length === 2 && "inline_data" in parts[0] && "text" in parts[1] && parts[0].inline_data && parts[1].text) { + return { + role: 
googleMessage.role, + content: { + text: parts[1].text, + attachments: [{ + type: "image", + contentType: parts[0].inline_data.mime_type, + data: parts[0].inline_data.data + }] + } + } as Message; + } throw new Error("Unsupported message type"); - } + } } diff --git a/test/chat.test.ts b/test/chat.test.ts index 9f6d7d1..2f65c85 100644 --- a/test/chat.test.ts +++ b/test/chat.test.ts @@ -30,7 +30,7 @@ describe("chat", function () { expect(llm.messages[0].content).toBe("in one word the color of the sky is usually"); expect(llm.messages[1].role).toBe("assistant"); expect(llm.messages[1].content.toLowerCase()).toContain("blue"); - // expect(llm.messages[1].content).toBe(response); + expect(llm.messages[1].content).toBe(response); }); it(`${service} instance chat`, async function () { @@ -42,6 +42,7 @@ describe("chat", function () { expect(llm.messages[0].content).toBe("in one word the color of the sky is usually"); expect(llm.messages[1].role).toBe("assistant"); expect(llm.messages[1].content.toLowerCase()).toContain("blue"); + expect(llm.messages[1].content).toBe(response); }); it(`${service} settings override`, async function () { From 1e715d14b34fa285f28206d67b2eab4dcbfbcf4d Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 18:21:54 -0500 Subject: [PATCH 14/23] cleanup --- test/attachments.test.ts | 10 +++------- test/chat.test.ts | 3 --- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 032e312..997d46d 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -10,18 +10,14 @@ const tacoAttachment = LLM.Attachment.fromJPEG(taco); const dummy = readFileSync("./test/dummy.pdf", "base64"); const dummyAttachment = LLM.Attachment.fromPDF(dummy); - const xAI_DEFAULT = LLM.xAI.DEFAULT_MODEL; -const google_DEFAULT = LLM.Google.DEFAULT_MODEL; beforeEach(function () { LLM.xAI.DEFAULT_MODEL = "grok-2-vision"; - LLM.Google.DEFAULT_MODEL = "gemini-2.5-pro"; }); afterEach(function 
() { LLM.xAI.DEFAULT_MODEL = xAI_DEFAULT; - LLM.Google.DEFAULT_MODEL = google_DEFAULT; }); describe("image", function () { @@ -33,7 +29,7 @@ describe("image", function () { if (currentService && service !== currentService) return; if (service === "google") max_tokens = 5048; // google returns no response if max_tokens is hit! - it.only(`${service} base64 image instance`, async function () { + it(`${service} base64 image instance`, async function () { expect(tacoAttachment.data).toBe(taco); expect(tacoAttachment.contentType).toBe("image/jpeg"); expect(tacoAttachment.isImage).toBe(true); @@ -75,7 +71,7 @@ describe("image", function () { expect(buffer.toLowerCase()).toContain("taco"); }); - it(`${service} image url`, async function () { + it.skip(`${service} image url`, async function () { const tacoAttachment = LLM.Attachment.fromImageURL("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); expect(tacoAttachment.isImage).toBe(true); expect(tacoAttachment.isURL).toBe(true); @@ -86,7 +82,7 @@ describe("image", function () { expect(response.toLowerCase()).toContain("taco"); }); - it(`${service} pdf base64`, async function () { + it.skip(`${service} pdf base64`, async function () { if (service === "xai") return; expect(dummyAttachment.isDocument).toBe(true); diff --git a/test/chat.test.ts b/test/chat.test.ts index 2f65c85..be55c72 100644 --- a/test/chat.test.ts +++ b/test/chat.test.ts @@ -3,9 +3,6 @@ import LLM from "../src/index.js"; import type { Response } from "../src/LLM.types"; import currentService from "./currentService.js"; -// TODO: Google bug...saving as JSON -// TODO: Converting back and forth in between Google Message format...need to do this before saving to .assistant() - describe("chat", function () { LLM.services.forEach(s => { const service = s.service; From d760883b0f6ac379ffd29bacd269f9672dc4eb6a Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 18:27:00 -0500 Subject: [PATCH 15/23] google pdf 
parsing --- src/google.ts | 4 +++- test/attachments.test.ts | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/google.ts b/src/google.ts index 7cb0df8..2281f3b 100644 --- a/src/google.ts +++ b/src/google.ts @@ -187,13 +187,15 @@ export default class Google extends LLM { // Add attachments first for (const attachment of message.content.attachments) { - if (attachment.type === "image" && attachment.contentType !== "url") { + if (attachment.contentType !== "url") { parts.push({ inline_data: { mime_type: attachment.contentType, data: attachment.data } }); + } else { + throw new Error("URL attachments are not supported with Google"); } } diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 997d46d..974f655 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -71,7 +71,9 @@ describe("image", function () { expect(buffer.toLowerCase()).toContain("taco"); }); - it.skip(`${service} image url`, async function () { + it(`${service} image url`, async function () { + if (service === "google") return; + const tacoAttachment = LLM.Attachment.fromImageURL("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); expect(tacoAttachment.isImage).toBe(true); expect(tacoAttachment.isURL).toBe(true); @@ -82,7 +84,7 @@ describe("image", function () { expect(response.toLowerCase()).toContain("taco"); }); - it.skip(`${service} pdf base64`, async function () { + it(`${service} pdf base64`, async function () { if (service === "xai") return; expect(dummyAttachment.isDocument).toBe(true); From 55a06e2acd8c58e6989ddb6400ca078e5172f1d8 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 18:33:58 -0500 Subject: [PATCH 16/23] groq images --- src/APIv1.ts | 13 +++++++++++++ src/xai.ts | 15 +-------------- test/attachments.test.ts | 6 +++++- test/currentService.ts | 2 +- 4 files changed, 20 insertions(+), 16 deletions(-) diff --git a/src/APIv1.ts b/src/APIv1.ts index 40c3866..7acb12d 100644 --- 
a/src/APIv1.ts +++ b/src/APIv1.ts @@ -2,6 +2,7 @@ import LLM from "./LLM"; import type { ServiceName, Options, Model, ToolCall, WrappedToolCall, Tool } from "./LLM.types"; import { unwrapToolCall, wrapTool, join } from "./utils"; import { keywordFilter } from "./utils"; +import Attachment from "./Attachment"; /** * @category Options @@ -105,4 +106,16 @@ export default class APIv1 extends LLM { const keywords = ["audio", "vision", "image"]; return keywordFilter(model.model, keywords); } + + parseAttachment(attachment: Attachment) { + if (attachment.isImage) { + if (attachment.isURL) { + return { type: "image_url", image_url: { url: attachment.data, detail: "high" } } + } else { + return { type: "image_url", image_url: { url: `data:${attachment.contentType};base64,${attachment.data}`, detail: "high" } } + } + } + + throw new Error("Unsupported attachment type"); + } } \ No newline at end of file diff --git a/src/xai.ts b/src/xai.ts index c9727c9..30d2eb1 100644 --- a/src/xai.ts +++ b/src/xai.ts @@ -1,6 +1,5 @@ -import type { ServiceName, Options } from "./LLM.types"; +import type { ServiceName } from "./LLM.types"; import APIv1 from "./APIv1"; -import Attachment from "./Attachment"; /** * @category LLMs @@ -9,16 +8,4 @@ export default class xAI extends APIv1 { static readonly service: ServiceName = "xai"; static DEFAULT_BASE_URL: string = "https://api.x.ai/v1/"; static DEFAULT_MODEL: string = "grok-3"; - - parseAttachment(attachment: Attachment) { - if (attachment.isImage) { - if (attachment.isURL) { - return { type: "image_url", image_url: { url: attachment.data, detail: "high" } } - } else { - return { type: "image_url", image_url: { url: `data:${attachment.contentType};base64,${attachment.data}`, detail: "high" } } - } - } - - throw new Error("Unsupported attachment type"); - } } diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 974f655..28caced 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -11,16 +11,19 @@ const 
dummy = readFileSync("./test/dummy.pdf", "base64"); const dummyAttachment = LLM.Attachment.fromPDF(dummy); const xAI_DEFAULT = LLM.xAI.DEFAULT_MODEL; +const groq_DEFAULT = LLM.Groq.DEFAULT_MODEL; beforeEach(function () { LLM.xAI.DEFAULT_MODEL = "grok-2-vision"; + LLM.Groq.DEFAULT_MODEL = "meta-llama/llama-4-scout-17b-16e-instruct"; }); afterEach(function () { LLM.xAI.DEFAULT_MODEL = xAI_DEFAULT; + LLM.Groq.DEFAULT_MODEL = groq_DEFAULT; }); -describe("image", function () { +describe("attachments", function () { LLM.services.forEach(s => { const service = s.service; @@ -86,6 +89,7 @@ describe("image", function () { it(`${service} pdf base64`, async function () { if (service === "xai") return; + if (service === "groq") return; expect(dummyAttachment.isDocument).toBe(true); const llm = new LLM({ service, max_tokens: max_tokens }); diff --git a/test/currentService.ts b/test/currentService.ts index caae9a9..02df2c9 100644 --- a/test/currentService.ts +++ b/test/currentService.ts @@ -1,3 +1,3 @@ // helpful for testing a single service -export default "google"; +export default "groq"; // export default undefined; \ No newline at end of file From f0dad63e7b1c527d57427eca8ccae0e53a0822ad Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 22:08:44 -0500 Subject: [PATCH 17/23] ollama images --- src/LLM.ts | 2 -- src/ollama.ts | 53 +++++++++++++++++++++++++++++++++++++++- test/attachments.test.ts | 6 +++++ test/currentService.ts | 4 +-- test/vitest.config.ts | 2 +- 5 files changed, 61 insertions(+), 6 deletions(-) diff --git a/src/LLM.ts b/src/LLM.ts index a844837..eda9ddb 100644 --- a/src/LLM.ts +++ b/src/LLM.ts @@ -156,8 +156,6 @@ export default class LLM { log.debug(`LLM ${this.service} send`); - console.log("LLM OPTIONS", JSON.stringify(opts, null, 2)); - this.abortController = new AbortController(); const response = await fetch(this.getChatUrl(opts), { diff --git a/src/ollama.ts b/src/ollama.ts index 8bc7d91..89d77b3 100644 --- a/src/ollama.ts +++ 
b/src/ollama.ts @@ -1,6 +1,16 @@ import LLM from "./LLM"; -import type { Options, Model, ServiceName, ToolCall, Tool, WrappedToolCall } from "./LLM.types"; +import type { Options, Model, ServiceName, ToolCall, Tool, WrappedToolCall, MessageContent, Message } from "./LLM.types"; import { unwrapToolCall, wrapTool, join } from "./utils"; +import Attachment from "./Attachment"; + +/** + * @category Message + */ +export interface OllamaMessage { + role: string; + content?: string; + images?: string[]; +} /** * @category Options @@ -118,4 +128,45 @@ export default class Ollama extends LLM { const response = await fetch(`${this.baseUrl}`); return await response.text() === "Ollama is running"; } + + parseMessages(messages: Message[]): Message[] { + const msgs = [] as OllamaMessage[]; + for (const message of messages) { + let added = false; + + if (message.role === "thinking" || message.role === "tool_call") message.role = "assistant"; + + if (message.role && message.content.text) { + msgs.push({ "role": message.role, "content": message.content.text }); + added = true; + } + + if (message.role && message.content.attachments) { + msgs.push({ "role": message.role, "images": message.content.attachments.map(this.parseAttachment) }); + added = true; + } + + if (!added) { + msgs.push(message); + } + } + + return msgs as Message[]; + } + + parseAttachmentsContent(content: MessageContent): MessageContent[] { + return [{ + "role": "user", + "content": content.text, + "images": content.attachments.map(this.parseAttachment) + }] + } + + parseAttachment(attachment: Attachment): MessageContent { + if (attachment.isImage && !attachment.isURL) { + return attachment.data; + } + + throw new Error("Unsupported attachment type"); + } } diff --git a/test/attachments.test.ts b/test/attachments.test.ts index 28caced..a83a6d3 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -12,21 +12,25 @@ const dummyAttachment = LLM.Attachment.fromPDF(dummy); const xAI_DEFAULT = 
LLM.xAI.DEFAULT_MODEL; const groq_DEFAULT = LLM.Groq.DEFAULT_MODEL; +const ollama_DEFAULT = LLM.Ollama.DEFAULT_MODEL; beforeEach(function () { LLM.xAI.DEFAULT_MODEL = "grok-2-vision"; LLM.Groq.DEFAULT_MODEL = "meta-llama/llama-4-scout-17b-16e-instruct"; + LLM.Ollama.DEFAULT_MODEL = "gemma3:4b"; }); afterEach(function () { LLM.xAI.DEFAULT_MODEL = xAI_DEFAULT; LLM.Groq.DEFAULT_MODEL = groq_DEFAULT; + LLM.Ollama.DEFAULT_MODEL = ollama_DEFAULT; }); describe("attachments", function () { LLM.services.forEach(s => { const service = s.service; + if (service === "deepseek") return; let max_tokens = 200; if (currentService && service !== currentService) return; @@ -76,6 +80,7 @@ describe("attachments", function () { it(`${service} image url`, async function () { if (service === "google") return; + if (service === "ollama") return; const tacoAttachment = LLM.Attachment.fromImageURL("https://raw.githubusercontent.com/themaximalist/llm.js/refs/heads/main/test/taco.jpg"); expect(tacoAttachment.isImage).toBe(true); @@ -90,6 +95,7 @@ describe("attachments", function () { it(`${service} pdf base64`, async function () { if (service === "xai") return; if (service === "groq") return; + if (service === "ollama") return; expect(dummyAttachment.isDocument).toBe(true); const llm = new LLM({ service, max_tokens: max_tokens }); diff --git a/test/currentService.ts b/test/currentService.ts index 02df2c9..ee6839a 100644 --- a/test/currentService.ts +++ b/test/currentService.ts @@ -1,3 +1,3 @@ // helpful for testing a single service -export default "groq"; -// export default undefined; \ No newline at end of file +// export default "ollama"; +export default undefined; \ No newline at end of file diff --git a/test/vitest.config.ts b/test/vitest.config.ts index 94719ad..8a529da 100644 --- a/test/vitest.config.ts +++ b/test/vitest.config.ts @@ -6,7 +6,7 @@ export default defineConfig({ slowTestThreshold: 15000, setupFiles: ["./test/setup.ts"], bail: 1, - retry: 0, + retry: 5, reporters: ["dot"], 
printConsoleTrace: true, }, From 805af4300d18f27ab1831abde4af445df84ee3ed Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Sun, 22 Jun 2025 22:09:45 -0500 Subject: [PATCH 18/23] clean --- README.md | 2 +- test/chat.test.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index b093423..5f848ec 100644 --- a/README.md +++ b/README.md @@ -795,7 +795,7 @@ blue ## Changelog -- 06/14/2025 — `v1.0.1` — Better model features support and tags +- 06/22/2025 — `v1.0.1` — Attachment support (images and PDF), Better model features support and tags - 06/13/2025 — `v1.0.0` — Added thinking mode, extended responses, token/cost usage, model management, TypeScript. Removed Together, Perplexity, Llamafile - 01/27/2025 — `v0.8.0` — Added DeepSeek - 12/19/2024 — `v0.7.1` — Fixed Anthropic streaming bug diff --git a/test/chat.test.ts b/test/chat.test.ts index be55c72..523c008 100644 --- a/test/chat.test.ts +++ b/test/chat.test.ts @@ -18,7 +18,7 @@ describe("chat", function () { expect(response.toLowerCase()).toContain("blue"); }); - it.only(`${service} instance`, async function () { + it(`${service} instance`, async function () { const llm = new LLM("in one word the color of the sky is usually", { max_tokens: max_tokens, service }); const response = await llm.send(); expect(response).toBeDefined(); From ffc7eca4f2908d0f89ee009858b1402c9c7ae532 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Mon, 23 Jun 2025 16:54:53 -0500 Subject: [PATCH 19/23] cleanup --- test/currentService.ts | 2 +- test/vitest.config.ts | 2 +- vite.config.ts | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/test/currentService.ts b/test/currentService.ts index ee6839a..840fb9e 100644 --- a/test/currentService.ts +++ b/test/currentService.ts @@ -1,3 +1,3 @@ // helpful for testing a single service -// export default "ollama"; +// export default "openai"; export default undefined; \ No newline at end of file diff --git a/test/vitest.config.ts 
b/test/vitest.config.ts index 8a529da..a6b1b98 100644 --- a/test/vitest.config.ts +++ b/test/vitest.config.ts @@ -2,7 +2,7 @@ import { defineConfig } from "vitest/config"; export default defineConfig({ test: { - testTimeout: 40000, + testTimeout: 60000, slowTestThreshold: 15000, setupFiles: ["./test/setup.ts"], bail: 1, diff --git a/vite.config.ts b/vite.config.ts index d3df796..81edaa0 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -58,11 +58,11 @@ export default defineConfig({ projects: [ { test: { - testTimeout: 40000, + testTimeout: 60000, name: 'browser', setupFiles: ["./test/setup.ts"], bail: 1, - retry: 0, + retry: 5, printConsoleTrace: true, browser: { enabled: true, From e0aad05a35c34b4f75be80696a6e6b40f285d424 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Mon, 23 Jun 2025 17:05:01 -0500 Subject: [PATCH 20/23] update docs for attachments --- README.md | 44 ++ data/model_prices_and_context_window.json | 15 +- public/docs/assets/hierarchy.js | 2 +- public/docs/assets/navigation.js | 2 +- public/docs/assets/search.js | 2 +- public/docs/classes/APIv1.html | 7 +- public/docs/classes/Anthropic.html | 7 +- public/docs/classes/Attachment.html | 3 +- public/docs/classes/DeepSeek.html | 7 +- public/docs/classes/Google.html | 11 +- public/docs/classes/Groq.html | 7 +- public/docs/classes/LLM.html | 5 +- public/docs/classes/Ollama.html | 9 +- public/docs/classes/OpenAI.html | 7 +- public/docs/classes/xAI.html | 7 +- public/docs/hierarchy.html | 2 +- public/docs/interfaces/AnthropicOptions.html | 2 +- public/docs/interfaces/GoogleMessage.html | 3 + public/docs/interfaces/GoogleOptions.html | 8 +- public/docs/interfaces/GoogleTool.html | 4 +- public/docs/interfaces/OllamaOptions.html | 6 +- public/docs/interfaces/OpenAIOptions.html | 8 +- public/docs/interfaces/OpenAITool.html | 4 +- public/docs/modules.html | 2 +- public/docs/types/APIv1Options.html | 2 +- public/index.html | 456 ++++++++++--------- src/index.ts | 2 +- 27 files changed, 379 insertions(+), 255 
deletions(-) create mode 100644 public/docs/interfaces/GoogleMessage.html diff --git a/README.md b/README.md index 5f848ec..0d8ee4c 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,7 @@ await LLM("the color of the sky is"); // blue * [Stream](#streaming) responses instantly (including with thinking, tools, parsers) * [Thinking](#thinking) with reasoning models * [Tools](#tools) to call custom functions +* [Attachments](#attachments) to send images, documents, and other files * [Parsers](#parsers) including `JSON`, `XML`, `codeBlock` * [Token Usage](#token-usage) input and output tokens on every request * [Model List](#models) for dynamic up-to-date list of latest models @@ -302,6 +303,49 @@ const completed = await response.complete(); // { content: { color: "blue" } } ``` +## Attachments + +Send images, documents, and other files alongside your prompts using [attachments](/docs/classes/Attachment.html): + +```javascript +// Image from base64 data +const data = fs.readFileSync("file.jpg", "base64"); +const image = LLM.Attachment.fromJPEG(data); + +const response = await LLM("What's in this image?", { attachments: [image] }); +``` + +Create attachments from different sources: + +```javascript +// From base64 data +const jpeg = LLM.Attachment.fromJPEG(base64Data); +const pdf = LLM.Attachment.fromPDF(base64Data); + +// From image URL +const image = LLM.Attachment.fromImageURL("https://example.com/image.jpg"); + +// Use with chat +const llm = new LLM(); +await llm.chat("Describe this image", { attachments: [jpeg] }); +await llm.chat("What color is the main object?"); // References previous image +``` + +Attachments work seamlessly with streaming: + +```javascript +const response = await LLM("Analyze this document", { + attachments: [pdf], + stream: true +}); + +for await (const chunk of response) { + process.stdout.write(chunk); +} +``` + +**Note:** Attachment support varies by service. 
Images are widely supported, Documents (PDF) and Images from URLs are supported by some. + ## Token Usage Every `extended` request automatically tracks [input and output tokens](/docs/interfaces/Usage.html): diff --git a/data/model_prices_and_context_window.json b/data/model_prices_and_context_window.json index 9d72f85..df91855 100644 --- a/data/model_prices_and_context_window.json +++ b/data/model_prices_and_context_window.json @@ -9995,7 +9995,15 @@ "mode": "chat", "supports_tool_choice": true }, - "mistralai/mistral-small-3.1-24b-instruct": { + "openrouter/mistralai/mistral-small-3.1-24b-instruct": { + "max_tokens": 32000, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 3e-07, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_tool_choice": true + }, + "openrouter/mistralai/mistral-small-3.2-24b-instruct": { "max_tokens": 32000, "input_cost_per_token": 1e-07, "output_cost_per_token": 3e-07, @@ -13647,13 +13655,14 @@ "input_cost_per_token": 2e-06, "output_cost_per_token": 8e-06, "output_cost_per_reasoning_token": 3e-06, - "litellm_provider": "perplexity", - "mode": "chat", + "citation_cost_per_token": 2e-06, "search_context_cost_per_query": { "search_context_size_low": 0.005, "search_context_size_medium": 0.005, "search_context_size_high": 0.005 }, + "litellm_provider": "perplexity", + "mode": "chat", "supports_reasoning": true, "supports_web_search": true }, diff --git a/public/docs/assets/hierarchy.js b/public/docs/assets/hierarchy.js index 13ababe..733dd87 100644 --- a/public/docs/assets/hierarchy.js +++ b/public/docs/assets/hierarchy.js @@ -1 +1 @@ -window.hierarchyData = 
"eJyVlU1r4zAQhv/LnKdbjT8k2bfCQgmkZNnunkoPJlEbE0dyJWUplPz3Rc4mKJvWVi4+2I+fd0bM2B9gjfEO6ichkEpCzlDKZwSrXjq19K3RDuoP4CxcdbNVUMOiP9xH2LR6BXVWcoSd7aCGVntlX5qlcrf/qG9rv+0AYdk1zkEN3q1uwms3JzQ8XLfdyioN9ROJkiEJQUiiIiSZsec9ghBRATPd7/xi5/ud/2U2arKUC/7aouRQg4wP4bdrXtVE7sAkZAW3jNw/leuNdlP6I3b1EbMitEOsiDIfvVXNNjH5HE7rkEqK4ubzh1MGZfKYMSiUu53PH760Hm6cdZQziYUg5DlHUXGkIq9CizmLj/VO+7U1fbscSz5BE/l7hELEHS26rtk2Y+oDMe3lOT/bNqXvZqPegZj2iir23hvz2qkx74GY9lZl7H0fL/Y9pVIiivf93pq30UKteUuQZiKehu9K9Y9KbcbERyZBXuRVPGo/Zn9odMwCcM2IVyXHcCwY2hjWV5Tss+FO+zr/jyfusPhk5BN/BzGbmlbRxSKk/nwiNjFNZuxiPdLSztiUtP3+L3ZlZn4=" \ No newline at end of file +window.hierarchyData = "eJyVlc1OwzAQhN9lzwbiNnY2uSEhoUpFRfycEIeo3dKoqR1sFyGhvjtyQyuHQuJecnC+zMzau84XGK2dheIlyxgXnMmEIb4yMLSsae4qrSwUXyAT/1TlhqCAWdOuM1hXagHFSEgGW1NDAZVyZJblnOzVD3W5cpsaGMzr0loowNnFhf/s4oj6l6uqXhhSULxwTAXjKCXjiJLxnIvXHYMsCwJMVLN1s61rtu5Jr2kwygl/bihMfAYMN+HZlm804LtnIry8NgbaD2QbreyQ/AE7e4uT1JfDkzTwfHSGyk2kcxeOq5ALHthNp3dHDz7Cg8deguzVdHr3r2q70KlozAVLUTApMoaj1Hdy7ksccxF4Xiu3Mrqp5n3OR2jAf8cgxVB9VtflpuyTbolhXSmyzrSRup706u6JYV0chQd+q/VbTX26LTGsm+dh3s/+sJ8xSTnPwma5Nfq9N6jR7xGi4zQ8rxui5pFo3Sd8YCLEBc/DVruffPDeNvPAOS2e5xnz28J8GfvxxfTP5o67nX/jkTOMUp60fOTvIGRj3VCeDELszydgI93yzmXRNn+cW4eNcdvtvgED7mag" \ No newline at end of file diff --git a/public/docs/assets/navigation.js b/public/docs/assets/navigation.js index 1f355a1..2cafec7 100644 --- a/public/docs/assets/navigation.js +++ b/public/docs/assets/navigation.js @@ -1 +1 @@ -window.navigationData = 
"eJyNl11v2jAUhv+Lr9E6WNtt3HWdNiElg7VUu6h64SUHyHBs13EYaOp/n/JBYmPnOLd+z/PEjk9seP5HNBw1mRNJVQGqIBMiqd6ROclFWjIortrg3U7njEzIPuMpmV9PSLLLWKqAk/lzJ0lECl+YSPa9ZlPyRGeC96KuyFbeXr9NOtGfQnDMUeUInlO1T8VfVHGuQTTHOhg0HHPmwC8Gfsf1TgmZJb0kYbQooLjqIlswnX0y+dXiMPWw1TDKaU2TXQ5ce+AuwwxfAeQjwN7lzwlGfxdiy8Blm3GUVOLVwynxilFRFLtQFMUYE4sU2FNBt5559hlmWDJGc+rSzThKSuB3Cw9Zj2Pk0YcdcaZrtaWs27cXZFyD2tDE7Me2yBbObm6d7cVsVkVYtRaCIZ4qxiQLLku9LLUs9VrsYWBOThWmjKJ4cUa9NrMAE8VQ2D1mONoMw5tewl61VYGq6uZCVWZFWDW4a32MS7CZBOewuryuDHzlu7EcXGeUPWoFNH+AQgpe+HfJW4mpUdsYQfOojG+rV3hPmf8tO1VhJTqz8Qsc3PjQlqPrGbOMp8FvyXNa2+gvRaWEdHDuRj5SM7iSixpMV1/lzpegT/J8zfu/hPefP05vZt4bf32S4JisNOSqbtuBKRlRyFIft5d8PRgioyh+BHXIEnCeb0QhS3u43guurd9BjchOR7oeBHNerREFLdWvCoevBkeRa7r1wmu6HcXXX4ivO+w05GrOVvckaVx2GnL9LCnL9OlbxjSoS5UVhkxtV/yguTMlIwpZUtjQkhnNcqAqo7+rP0FtZBs+zN5e/gPjynDP" \ No newline at end of file +window.navigationData = "eJyNl9Fu2jAUht/F12gdrO027rpWnZCSwQrVLqpeeMkBMhzbdRwGmvruUxJIbOwc59b/+T5ObMcxL/+IhoMmUyKpKkAVZEQk1VsyJblISwbF1Sn4sNU5IyOyy3hKptcjkmwzlirgZPrSShKRwjcmkl2nWZc80ZngnagtspW31++jVvSnEBxzVDmC51TtUvEXVZxrEM2hDnoNh5w58KuB33G9VUJmSSdJGC0KKK7ayBaMJ19MfjHbjz1sNYxyWtNkmwPXHrjNMMMDgFwC7Fz+nGD0dyE2DFy2GUdJJd48nBJvGBVFsQtFUYwxsUiBPRd04+mzyzDDnDGaU5duxlFSAr+bech6HCMPPuyAM+1Wm8t6+3aCjGtQa5qY+/FUZAsnN7fO8sZQ2LNn2KyKsAprzKoIq1ZCMMRTxZhkxmWp56WWpV6JHfT05FRhyiiKZ2fUazMLMBE24QOmutmW2FRbFaiq3qeoyqwIq3pXrYtxCdZJsIfF5ZfPwBe+j5+D64yypVZA8ycopOCFf5W8lZgatQ0RND+V8U01hfeU+WfZqQor0c6GP2DvwoeWHH2eIY/x3PsueQ5+G/2lqJSQ9vZu5AM1vU9yUYPp6luB8yboozzfGPxvwsevn8c3E+/lYXWU4JisNOSqPtw9LRlRyFIft5d8PRgioyhegtpnCTi/b0Qhy+lwvRdcW1eqRmSnA11PgjlTa0RBS3VBcfhqcBC5ohsvvKKbQXz9hvh2h52GXM3Z6p4kjctOQ66fJWWZPj5mTIO6VFlhyHTaFT9o7rRkRCFLCmtaMmOz7KnK6O/q/9Qpsg2fJu+v/wECnYp8" \ No newline at end of file diff --git a/public/docs/assets/search.js b/public/docs/assets/search.js index 840f971..1c3dd87 100644 --- a/public/docs/assets/search.js +++ b/public/docs/assets/search.js @@ -1 +1 @@ -window.searchData = 
"eJzFnV2TI7expv9L67bPuPFN6k62dbzalWwfSz4bGwpHB9VdM0Oru9km2aNROPzfNwpgFoGsF19FevdKiukqvMmqBJCZD1D4581+98vh5ssf/3nz8/bl8eZLfXvzsnkebr68ed3sD8P+cHN787Z/uvny5nn3+PY0HH5z+vd3H4/PTze3Nw9Pm8NhONx8eXPzr1tqxZ6bedg9Dr992j38PDX0/u3l4bjdvZybmq4Bjd6Ohgwvx8giqPP3w+6lIDH++YLWnzf7nx93v5QU6JILVD77G3MCn5+fetsWcjU1/tXL8eN+97p9mCROrfxm+kvxnRoho5f6cjju3x6Ou32ltS/SS7HhZ9Mi0+/k+cH8tDkMf90/1bTGy9726S84Hh7/Y3v4j+3Lx2G/PQ6PXbrD5+Pw8uhvKgpH111JOXHnjOrpmispPm8+3x8/bl9+3r58uD/ufh5eDjUD8C3XtKfdjGurD4fD5sNQ1z5fdy3l3eNQ9XS66Jqafx1/SJPw2+nKK6nvXv1AV5M+X3Yl3TBe1mSnq66k+o+3zdP2+Ot/bp+OdfHTxe/p4ivZcBj2n7YP1Zd9vuxausf9sHmuytJVV1I9Ds+vw35zfNtXf3F66bX0xyGyqny66Fqau91TtUfRRVfS/OrP39z/99d/+f6bP/2xprx53d5/GvaHLZvBuvR+//V/fvXXb3+4/+1X3399/9e/fFsTfRzeb96ejvdjmHDP44RFyt/96fdfN8vOx+wuze3ht8NmP+y/ejt+rEluDz/5azfh2iu93+3h293Dpjo3bQ9Pp8uuOk5Xvfl82f+XUfI/RNe7lVYKfdbavG7/1/BrQ7f52V+1/Bcy3YePm2NDZD1e1ttjmFKf97CneeGvfHp6/h/D5rHBiZ6enj9OVy7+rU9Pz39qC2qenp6vEdcwfT/UHBrea7jwwjfb1z8vfbN3+pzTbn7a7Y/VTnO66Fqaj4/fhaC/Kvz4+DxdeS31w2F7OG5e6r86uvBK2uMg0DJQXE/x/XB8+Pid99GasL/0mS69lr6Pu/8rBOHftSRm4Y5T2N4/4yfqH4bj79qG5w/jj5+P0Bf99g/D8fchcvmuOTv8MBxP0c6V8kRuUZsvfBiO1/aESbvtbeBx9VILYkdseQixG17xWfiB/He7l+NQH4b8tQ/TtQt7Qqz4u49v9Wwqln043XCJ9neN5SB/8VVqQsCGlvEnGHDZuOPbaIxf/LUogulX/OFUSWySPJ4vvoZmu1OR8DW86oexaNk0sAbp8fL5mLpEt6E4cJLkFYKFah0PeLz8Gk+3/cFeeZ7aD+/3w+Fj+7R5uuHfMWfuh8Pr7uXQYMJ03ZWUD8NLlZucrrmW4q+H41CvLtJVV1I9Ng5bcMS6THm3e/rd5qk6J4zXPYTrrqT81lA3f7u0ap4ofhr22/e//m738jJ4LFpTD9c/xNdfUBWKMeqfv/kk5urjv14Hn04ttaFTb04vNj1rdCPTkl4el54F+1FpSRFj0rNaHyItKTXh0bPwRWi0akeb/DVVs/FvpNkd9hYVcbQbyfUh0KpWZqZmgp3Tc0k1iz3Pkt2lwZJeDnee5XpRZ0mtgjnPogsRZ0k7W7g/q3ajzaJeBmtGcp1Is6RWxJlnyUUos6iLMWak2Icwi1o4Q4m0+tBlSauOEc+y7QixRTGDD+dylYS+pFXGhmepLDLsUMoAnlikEw2W9P7X1//n/i9ff/X9n/74zR//cP+7P/3xh6//+ENJ/efh1/v9sDnsXsZJuVqUqo+lRQ/txpHXGdFKGJIpNCLIs0QvfizrZdFjFAHXsGNZod0je6BUWbOEGs+yGcx4mW62ZJfoXhpTtKNGFjqVMWNZpb2/XfImG9Bi1Bn6sGJRq4AUI8EFOLGomkeJkWg/RixpYoSYdvjrKBXR4VlwETYs6jYgw0i+AxeWVEuo8Ky2BBNWVBsQYaJ/AR6sWFJ/1wuwYJNm/akvwIEV5QoKTNQXYsCSBWUEyAbl3kirF/1huQp
CqGrmkR/Tu7ju0Yz6uPDy8aKM+JhOFe9VlfJoj0nVsV6zVpuzNOK8umoB5XHJBozXoFdJkFvwXZtK44NswHZVvbYHeMV5owHVncUvwHRlG3KILpbuxXMlRYzm4lSyB8sVlTJILtLqxHEltTyKY0WqLgxXVMwiuLRU1YffSooYvZ3V+rBbSamO3M6qy3Ebr3rEqO143Dx8fIYhxvSn60C3tLk28na2LlO0OQUiP/z6Cvr2XHG89hiuXar4uDluqlKni5ZqHFt+Tv/v4MWhXGyJn9tFWtvD73cPby1+9sX28Hi+9BLFb57hZDOT2z4X44QGrcPubY8qh0xquqxHKUl897vn324Og9VVrfHSn+jSS/TovcGqPRCll1esRDUp/+Gb/2xS/LB9f6GS95TWH+jd5fJf9z///PUfmvT+/joUYvUmrT//vu1Jvj5e+iT//Me2H/Vayj+alL7/7zalw6dLlX745j/bHt9x+/7S5/e/v/7tn5u0fhl+eu2cYaJp//fD8Pr9MMxTDvrDVab8pLGmCX+yq3O1TarUu+Cmpppdc5PKdi+7qen+Ha28STX/3rP4pqbXsv4mlb9kCU6TNc1GXFk7V5hiyr1lqaouLEox0a5FOU2KOC0Hsn0peU07tzonFe6FaTXVzBqdVLRzmU5Ns7xSJ5VetlinZkGObqfavUt2qqp41Q4T7Vu4U9Msrd1JhZcs36mqwxU8TLdrEU9VEZYpmWLXUp6aYnU1TyrevKCnVRev6cGi5QJ+TbG4sicVXPQ9gLo+Xl3BpfuW/NRU21b9pDbUF/5cPmbXvLx3OdBVx8zCoiCg07YuKBXqXBpUV82tDmJxeyfOrut2eXXHYpO6cmHlUCq+YPFQk3qOB87UrxD1NK8iAoFeF0iva3f13wvfeH2FEetWXYuMqor5dUZMtn+pUVU7u9qISXcvOKopwzVH82HkanqllUep7JLFR1X1+vojZkR5CdIlthRWJaU2LFiY1KBdX5s0s2L58qQGe5p8on+RUrNy03u4zgjbtVppZsOyBUs1O4prlsBgf4U4sXMlU96I+cKKiy3Jrm8CVlyjltS6ygnJX3VUKq59AurXiHbaV0QBA7qXLHRZ0OyK+aVSl9uSXz2FDMELqK5gRb1YkVlWdR3t9leBF1tdbEXzK7ju06+vwkpNWL4Qq25JZi0WN6BzOVZNF67I4ql8x6Ksqh5el8UU+5Zm1TSzq7NAAfKao112jda8DNm1TKumC1dqpZpdi7VqetX1Wqn24iVbqDIV4ds/7HYfnuYdKPzzVdBt1FQTuD1Z1IltY5VeaFtWzCLbWLIb2JY1Ia6N9bpgbVmrBdXG0peA2gZLGg24qm4utk5Ue4PqiiaMphPBrjC6QQ3P1TPJvim6rJvDsrFob8BeVswg2ViwE8iW9co4NpZdBmPL6jmsEOv2gtiKIsawiWAfhC3rlRBsLLoEwFaUIX5NNLvga0UNZjOJWlcaU1arYtdYuBm6tmli5IoEy8C1rFbErbHYItha08ZQKpXtA60to17Zf3qR5tVGngLOnGm0wcxYpBNl1hRzIDOJYivfOahpdPhHB8CqqRaAZSzc8En1BqVcwY4pXTz3N4PJWZhT/L5BTaejv130BusAMukKXfixopaHj4lkP3qs6GbBYyLbjR3LqhA68i5/Ja0ScIwll+DGinIdNiYGtH/toKxbAIuxXgYrdunUISJTXI4Qq7Y0vON+fNio2vCsl4yCXaCQ6S3DhGUbipBwNhjXdp+1a+HSf06wr/TfYEUWBs4suLxq0QoC59IXjBhF6DdTqn3xoEErC/hmYtVvHnSoNbrRMpTXYkce5M2NqH8HoUmxluY2fAmhVaf18fbjuQYLGh/rNeecOpiL5ZdjuZoVGSiXinciubImBHJpKtqB4ypaGMYlan0orqyXBXGzUlQPhqto5iAcL0h1IbiyJgRwsV4XfitrVeFbrLsYvc0rKDF42+/+MZfd7/5xHehGDbUht9GWXuA2KXT
jtoJaHrZNcv2oraCHQduk1YfZCjpNkG2SvQix1axoEr+iZjZWPSt2B6klPRydnsX6sFpNKTPPpnKd02tBMwvUJsHuklpBLQfTJrFelFbQqoC0SXIhRisoZ0vZk2Y3QiupZQDaWawTnxW0ivBsElyEzkqqGJyd9fqwWUkJZxNnpT5kVlCqA7NJtB2XNehlYNlMrJJsF5TKoGwSWobJiroZCBJJdiKyglrbPsSzdt/Hx+tjaMlLu8HcNcayEpRL229EcpNAL5ArqmVx3Dm+7dyJUtZr9soerFNULGG5SXTBHsKqara8FqteGE2047k0YOra0VLWbO5/F7zRBkx37h59kK6kVEB0Z7kFgK6kmcdzZ8l+OFdQxGgu6f5X0SliuUluEZQrqTYgubN4B5AraJZw3KS1YI9fRbMBzcXqF4C5sh3Vd7wAyrUoVp/35SNfH56LtRfCuYJ+Gc2lg2/v3r1W3QwAgOKdAKBmQR7RpeqX1j6a8RyTvaQK0oHqUtUqqKvp5DFdKtRfDm9VbnKphcCuakMB1zEDGmBdXa2cXC/YW9em2faIF0C7mnrTo73eXNSA6ybpC2Bd0YIcqouEe0FdQQ9juig17YF0JZ0MojsrdQK6glYez6UFr2uNRnk0l5S9+sBcQQ9juUmrD8oVdOpIbtJcDuRY5STCcd9++91M8dtvv7sKjKN2mljcaEgnipvaRySuse0seJsah9ytsXWI2aaWZ5StsdUWqDaJ1Jhaj2aL1HKFXOR4bh/FjK2tw+jw3HQxq6y1i2extPFyVFJQyKGvqfladFloOwO6pqYB52psuYy1JoE81WrUyRV+JwXEsFrbxsjq3PScWDW2XAJUU/M5PtWqAXHUufUZjWptF8bH53aLS9gK7VZR0yTRTJoa1DBomkktHh+KmGmSaTrWtqiCy/eRwJwpdY0RhTeOOM6Ve2+B2qStt0GbqXnAbJrbziGac9RTWdRfbL31jS59MgX8Mkk0bImqaeQKJLFG3/zVjFbS6be44L+o0Or/7W+ijk3ODjqjJq3t5iHJuXHMSFoVskjkLACJSGP7EIAknWtJqyXcMTWeox2tGnW4cZZqZxsFxQLamJQathmVFeogI9Yqc4x21dq7wtSit/3ak+seQ7qQRKyUJxKNakUAkQ5atfUdjSq4cAmlykcv1vSycCHV6swTW1ECE1naV4vgINXonBnbsUEqU93a06rT4ghtR5lWFfNIgMnViUBdq5jwNGzcaVJoenj140trWi0PbfHzqpf2J6FyZb9ZL1PIj2RAHb+xdVi2j1KTl9aaYEuR/tzuvEbf2HK2JJ8m/Qt7erYAn6T+s/p7Y+uw3D61PKu2N7ZaLa5PCsXaeiGbjUrpBa8//+kqhXXWXFN9PbKuMxnnarWKWl2pWurhks0VH6gNQ76qJkV9F2g943CPK1VivbrOaSytCp2vW6q0eXz83dvhuJuPX1xr8/j4QFcuVXt4Gjb7Rj1/7cWK7zdjJ9qCEJPrxVcuVsPV8ZlUuTre5PMt7n6ZwldgagAipfmhSee3v37fOFB9GI4//bpotMpqNg8dk/ClY8iY2rf1gDHBv9T/J7V6B5jk/h+OXP8hL9J63n0aGh9muHjJ84xDgj89PW2e52ddh3++SigQNdUUBpws6iTtsUrvtteyYpa/x5LdW1/Lmn9HVD7W+3vP9teyVgurj6Uv2QLbYEmjAVfVzRVtEtXe5aAVTThMJ4JdC0Eb1HDgP5PsW8RW1s2tDIhFezeylBUz6wViwc6tsWW98iqCWHbZ9tiyei4hinV7t8hWFPGKg0Swb5tsWa+0DiEWXbJVtqIMVyckml3bZStqsISXqHUt6C2rVdPbWLg5tW3TxOsZkGA5MC2rFVc1xGKLts/WtDEZT2XLyx1aRrmyv/RuXr3aSFPAzDONttUQsUjnJtaaYm6NRBK1VqBkTaPDHzq2OtZUC+snYuGGJRQNSjlaxJQunuub11bMwpoiGm3SaZK5RKWjV1/kJ/X
VHUmH69oWW1HLr/lIJPu3xlZ0sytBEtnu7bFlVbg+hA8sV9I6rxopKp7XjXR4aevqFCzUM650rlFJBJcfv1m2o7ByJdZfsC23qltfz8IsWL41t2pLwzvv357bqNrw7Pu36Fa1yytimP6ybbplG4rrZGaTQm2pTLsW5vw5wTLsb1DNrpmZKV5eg2ldSTOXvmDMLK6nmSnVltQ0aGVX1czEqri9Q63RbZZtwW2xI7/iZm5EfdFNk2ItaW9YetOq0/p4+7ffNljQ+FivOefUV+rE8su34dasyKzfScU7t+KWNeGqnjTR7tiOW9HCa30Stb4tuWW97AqgWWGtZ1tuRTO3LoiX17q25pY14WqhWK9re25Zq7qGKNZtXkY0rwfF2PB1ePnqm7mQ/+frYMNzU23YMFjUiw0jlW5sWFTMY8NIsh8bFjUxNoz0+rBhUasJG0bSF2HDuiWNBlxVNxu3xqrdIWtZE0ersWBf6ltXy8y9XLJz0i3qZrFhJNpdSiwq5rBhJNiLDYt6FWwYyS7EhkX1bDE/0u3GhmXFDDaMBTuxYVGviA0j0UXYsKyMsWGs2YcNy2o4A4nV+rBhUa2ODSPhdmzYpJnBhkCwkqoX1crYMBJr2g9d08pgoUSm70u7LaNc2V+6seG1RpoSNuQajdgwEunFhhXFLDaMo9YaNqxodPhHDw6qqJawYSS84Lu3DcrZIlmqfPHc344ReZhTxogVnY7+d9EbbQB8cdfoA3xltQLgiyUXAL6ybh7wxbL9gK+oigEfGwKupFUEb5Hkku/gVpQbQFxsQPt+8bJuCbxFekvAW023AbylFlwA3mq2NLzzBeCtTbXh2S8AbzXtCnhL9ReCt6INZfDGB+sqeGvWypT4M4IV8FZXzYM3rnh5FaMZvM2kLxhByuCNK1XBW10rD964WB28tas1uk3bvvYW3QJom4k2gLYWxVqa2wLaGnVaH2d9p3uDYuNjvOac0gDWIvkLwFrFihxYS8R7wVpRE4O1JBXtAWtlrQxYi9U6wVpRLw/WeOmpC6yVNbNgjRWg+sBaURODtUivD6wVtepgLdJd/O3beQUlAm2fAWX7fCXE9nlKUxr42ud+uDa130vWClpZrDaJdTO1ghoEapNSF00rqLSgtEn0Eo5Ws6FF+nqKuejzrNcbdZbUYLR5luqiZjUdPKumYn1zaUExB8smud5qWUErg8k+J7FK++hbUCoDsklwGR0r6OYK1p/PIUIfFytpYSh2luojYgWlEg6b5JawsJImBGGfk1DkKjowN/gchx9X8f0q/Jokm8lXgxrGXjOpcpZc0CkCr0lm0Sa5oipGG5FgH/cqaLUdMDkp18+XvGjsLHhqL3m7wihWYG5p623AbWq+k7YVtXKo7Ry9dlZ/i2qtntnBZ4p6Bdw2SS5gbTXNXFEs1rwsbmhGbGlY1FVJLiq29rflb7KO2c4doouxlXTygO0s1k/XSopZtHYW7OZqBT0I1ZLOfg2VEk6bxJawtJJmHaSdpZdvZytYUEBqk/ICnlZWrMO0WHs5SStbUXvX/QytRa/2pC8e87rQWay8jJsV1IvQLB10L4uqOvEZlO7bIFPTz4K0VPvCekYrQmOi1xo7ijAt1bwwcmjHaqlsd3W7VbfFsZZta6takEdtTB5ztsu0i4n1glMmmxSbHnb/JreadstDvtrzrSO4SXg5fyvqZ+BbJNtJ3gpqELtFqWgHcyupYOB21umjbQWlLGpLi1tXGomykC0pcXURtoIaxGuTUhdbK6hUwdqkuJiqMXVjJ/GvXo4f97vX7QOfzLYvx2H/fvMwHH7DrynytqT0wkojtTZ7SyUz43NmHI+bh4/Pw8ux/fd9kd50XYM4JKwa0wsNWw2ZEcSqJd1EsdWUBC9WzejCja0mlNhj1aJLWGSXgUvs+jeZw0P9ujG9UX+zKUn0X7ejKxFoNYIRwaoVnYSw1QyMC6vWLMOHrUbxKnzVnF622GxIChrrdvSBx1YzEIWs2rKESjY
blCDKuildyLLLiDiabLOjtHa0WTrJ8Oq6XclewYg4GvvDbvfhaVZXiGxJLrhGHDZvsDcIS21eFoEhKxaEX02mFGIvYEZv4NVkwqnu1vooosuxizeJlkI9INod5zUZ8WF4Gcev7e7ld7uX91vYz4Ex59se6LYLnkQu0gTCXWFmk3hjjAlsuSTAbDet26J/hyGFuBKZ0RtUthmRiyiRBV3hZJN8PpYE+p2BZJMB1SgS2LEshGwypxA/AkN6g8c2E7KRI7KgL2xsM8BX7e63pwXK2+ZhDN54wQhaCV6BBUsi1zZTcmErMqIrZm2Tz0WNSL4rZMzJz+PFkRTkTRj/2h4pPg6Hh/32texbU5NfpJeXPMobmRH1/2lQO123VOZ1s988D8d4HUxBLLm6QzJ+P9+8vL4d//R2fH07/pCdWGcXtb+t7XhrYc7GTX/BbsM/bm57bs21v6bfCn5fvxnxo/722+++IUlkRPz3izaMlNps2jySWJp5plO+2Ca7iS6/QPTP33wSjYKv20/55ZJNYlN+16gYX79c9vfD8Pr9MMAZYy76OAyvh3D1cskwSrQJfqBrL5Db7/7RKBauXC41Hq7apPT0VDwhtyoE2HBRr+2Y4qosOw+uKLmjay+QS78jWZZ7HV4224vk+JrQoh5aj90reArKGxWjq5dLfm59np/7HyZbRPBhe8gkSnO16Orlkm8vfaLJ9X2y0TzLF9xGkqc/tYcvD2yBXKatL2oflSCbMjL7HR5+E43TRa0C8SMJw0ShaptccI2q7bzBZUcBXVa1RVYsqNo2mVKo2gIzlh16eUEBFdiw8BTMhbVLYEBX7bJJvLF2CWy5/JzMi2qXOYv+HYYUapfIjGWnOCytXSILFhwPU5Hnu3DLBrSd8bC4XAoUFx16eWm5FNhxySmYy8ulwJBlx2IuLpciC5ack3lZlRJYsfzgzKVVSmTErErZL5erSiK5BUdplquSIY8pxUPxBVeJh2YNLvvG6YXxELBiSTzUYkopHpqbsexr/pfEQ3MbFn7ev2KEL2U2WkDXZjpWi1w2/Jqr9YVfLeJjyFIttgJL0H0XPIPWKBAbctmxB5dFgRmL/h2GlKJAYMayTwoujgKBBQtOSlgeks31Fx0ocHFINrfjkhMGKuZMH1JoNCW+/oLuWooE56LLTjpYHgkCC5YcfXBhJDi3YvlZCIsjQWDEgsMRFkeGQH7BaQktkWGOV5//ejVezZps4tWRkZ28mqsVeXVdpsyruVgDr65LHo777UMhnjrLTVculTr++tr0EE/XdcikHlfIQq6Yf5Qzj9Tky3KMhuyiQ66YR1QyiA6Zcq5QyxI6hPIBeik07xBojn474t5e+Q7Vy8SKUWwtfu0RykeqxRi1Q6IUjZbj0A6RhoizKdbskCyGd5XArkemEMKVg7cOkWqY1hCg9cjlQ7H2ctzicKsYaJUl4gnuz3mKf/pTcYJjTR23m6fv/Tv8C98VnzY8v7AH+z6/Pg3Hzqa/iG7DDwrb388oCjbUWEWXCYWuWzCh1pH7TMh265IF5U7eZUC2Dxb0yz2yJh97fMnJl/h1djkD9+TyeobaQyvNy6lSdWK+oJv09YwLOkOf/zd5XKYKA/ysVH+pSu12T/fjxyganl9y7TK5t9zCnFSpvC6u0mNCt9q+fPiBf4kj0ptd1LGiGaYIuMEvtvk0YW5nL0DIaRYhQrNsroSQUS1WEkqi83dXGvOWz+jZkQ/P5H2fPGubSkqjIrKiu+h+cWDRGlFcbkRhNG2NKS43ojTSZuf1ng8FNZpRHIWhIXgsvtyU7AiNrOj7wFVTvJOrvF615tpXbS2VCMM+lcPDxwGvwj4rsSv7pXLjcWMxt1CBLE2U15ofm6fFxbNhzyS4dO7rmPIqM112q8Bf+xYoB7d62B3gMzk39kVyITb5r6UlyrWNXDOpTmhdFH9KPvKNVedfoG9u/rT0oOEpplcul2p6jvkVEYsf5HF33Dy1/NDkwgW/M9zf9DPZpW1
icV/63/vN6+vwmJs6oj+396v3by/Z7cu8wS+ii7H1sYWdsGkmVqRNWCjzsHJjPrvk6g/tPIL2PDg+mN6tnTDnjZB+ayBnaeOzOvwm/lO51MjbnOjVD/HbObWa/LGr3XEDHDY1+ktXi98ks2Joy/9bVyvffvvd93wHVGgr+ktXi6dtKfw706HR9I9L2v1LvG8maXT8S1+LCeA5tTX+W38rP2w+oIZ+2Hzob8uPe8D70j8uiRfuX4d9GHXrTX+RuS2zFyk1vMAJYVCRtwLccqkFeD4um9C2UrHdhi7x66juHhsc6ovTZZcqwW4FpPLRW5tWFJx5Lz2fC9Tq5vUWrmnfUquuYQsvv+TVa9XsRr2319fd/ni437w9bnf3WzBfQXF82xUtCQ+335TpvqvYQoGQr+nEBakGe8C9V7Hpdb97fj3eP2wePnZaNLvzKvbMV6c2mFJfotppxaftIY5wG0yY7riK/i/DT/eHYbN/+NhjQ3LXJXYcNx9aZqrTZZ1KPAIKywNmOCDIpn/siqz+Cy6CCc0mf+tYeff0tPvl++3z9mlTb/ELf/Vhuho/ptTKku5fX35+2f3CnTKn+zZdfYHucffKTq3Jih53r+DYmhZF/t5OKcgf4+pckI3+Unxn6tzY6eSiqaFPm/1289PTcPjN6S+oob/d3mxfHofPN1/+c/wqvu/YX97Id+rd+ub25v12eHo83Hz5IxUQH3bPp+/BPO4e3vz//u102X8P4yd4xovD1b+5u7n98e5W2XfSmr/97fZHutn/wf8DtXH+F3+juLn9Udxa+W61FsmNYnajSG6UN7c/ylu1fmeUS26UsxtlcqO6uf1RIUU1u1ElN+qb2x81ulHPbtTJjebm9kdza9w7t1LJjWZ2o0lutDe3P9pbLd7Z1Sq50c5utMmN7ub2R3er797JO53c6GY3uuTG1c3tjyt042p24yq5cZ17HevZjevUAUZ/WCNJMfcdwZzHe88dkhXAf1IHEqNbCHGrVu/WQqY3z31IpE4kRtcQEt489yOROpIY3UOoWy3fGclunvuSSJ1JjC4iNHxgc38SqUOJ0U2EgTfPfUqkTiVGVxEWPu25X4nUscToLsLdKvVOa3bz3LdE6lxidBmxgspz/xKpg8nRZwT0MDn3MJl6mBx9RkIPk3MPk2yI8mOUgMpglEo9TI4+IyUab+Tcw2TqYXL0GalutXqnVephcu5hMvUwOfqM1Ldav9MqHbDk3MNk6mFy9Blp4M1zD5Oph8nRZ6S9Ve6ds+mIJ+ceJlMPk6vcHCTnDiZTB5PrrHfKuYPJ1MHU6DISDrdq7mAqdTDlHWyFnpeaO5hKHUzJ7PNScwdTbB70DraGymAqTB1MjS6j7uDNcwdTqYOp0WWUgDfPHUylDqZs7jWruX+p1L/U6DFKQuG5f6nUv9ToMkrBm+cOplIHU6PLKNin1NzBVOpgenQZBfuUnjuYTh1Mjy6jLLx57mA6dTA9uoxy8Oa5g+nUwbSPtKBr67mDaRZseQeD3qlBvJU6mB5dRkPv1HMH06mD6dFnNPROPfcwnXqYHn1GQw/Tcw/TqYfp0Wc09DA99zCdepgefUZDD9NzD9Oph5nRZzT0MDP3MJN6mBl9RkMPM3MPM6mHmdFnNPQwM/cwk3qYGX1GQw8zcw8zqYcZH89DDzNzDzMspPcxPfQwA6L61MPM6DMGepiZe5hJPcyMPmOgh5m5h5nUw8zoMwZ6mJl7mEk9zIw+Y6CHmbmHmdTD7OgzxqCIxM49zKYeZkefMdDD7NzDbOphdvQZAz3Mzj3Mph5mR58xK5iDzT3Mph5mR58xa/ib5x5mUw+zo89Y6GF27mGWJY4+c4QeZkHumHqYHX3GSpSw2rmH2dTD7Cqbs84dzKYOZte5tNXO/cum/uXucpmrm7uXS93LiVzy6ube5VLvcjKXvrq5c7nUuZzKpq9u7lwudS6ns+mrmzuXS53LmWz66ubO5VLncjabvrq5czlWmXDZ9NWB4kT
qXG6VTV/d3Ltc6l1unU1f3dy9XOpeq7tsgrCa+9cq9a+VyKavq7mDrVIHW8ls+rqae9gq9bCVyqavq7mHrVIPW+ls+rqae9gq9bCVyWagq7mHrVIPW9lsBrqae9gq9bCVy2agq7mHrVj9a5XNqFagBJZ62Gr0GatujXi3vktzk9Xcw1aph62zJdT13MHWqYOtRdY713MHW6cOtpbZ9HU9d7B16mBrlU1f13MHW6cOttbZh72eO9g6dbC1yaav67mDrVMHW9ts+rqeO9g6dbC1y6av67mDrVMHW2erFOu5f61ZjXWdTV/XoMzK66x32fw1/C29Pfq30/0im8KGv/H7Wbn1Tmaz2PA3fj+ruN6pbCIb/sbvZ0XXO53NZcPf+P2s7npnsuls+Bu/n5Ve72w2ow1/4/ez6uudyya14W/8flaAvVtl89rwN34/q8HerbOpbfgbv5/5n6/d4+xWoFL/rNYvsgmugNV+5n++go9zXIEK/rzi74v4OM0VqObPi/6+jo8zXYHK/rzu70v5ONkVqPLPS/++mo/zXYGK/7z67wv6OOUVqP7PAYCv6eOsVyAEwBmAL+vjxFcgCsAwgPCVfZz7CgACBCMBwhf3cforAAsQDAYIX9/HGbAAOEAwHiB8iR8nwQIQAcGQgPBVfpwHCwAFBKMCwhf6cSosABcQDAwIX+vH2bAAaEAwNiB8uR8nxALQAcHwgPAlf5wTC0AIBEMEwlf9rUZpsQCQQDBKIHzhH2bGAmACwTiB8KV/ayAqB6RAMFQgfPXfWnw/cD9GC4QHANbdGv1OC24/Qp7M/TwDsCsUHAuADARjBsJjALvG9gP3Y9hAeBTg7vD9wP0YOhCeBji8VAHAA8HogfBAwEGeJwA/EAwgCM8EHFy4IABCEIwhCI8FHFy/IABFEAwjCE8GHPY/ABIEIwnCwwGH/Q+wBMFggvB8wDl8P/A/xhOERwRuhe9H1J35n6cEDvsfgAqCUQXhQcEK+x/gCoKBBeFZwQr7H0ALgrEF4XHBClb1BKALguEF4YkBHr4AXxAMMAjPDGCSKQBhEAwxCE8NYHlPAMYgGGQQJrtaSADKIBhmEJ4c4DUqgDMIBhqEZweZZSoANQjGGoTHB5mVKoA2CIYbhCcImcUqADgIRhyEhwiZ9SqAOQgGHYTnCLklK8D1GHcQHiVkVq0A8iAYehCeJmQWrgD4IBh9EDZfXhGAPwgGIIRnCpnlKwBBCMYghMcKuAQoAIUQDEMITxZwFVAAECEYiRAeLuBCoAAsQjAYITxfwLVAAXCEYDxCeMSAy4ECEAnBkITwmAFXBAWgEoJhCeFRA65TCUAmBEMTwuMGXBcUgE4IhieERw6wZiQAoBCMUAhPHTLuCyCFYJRCePCAC4QCcArBQIXw7AHXCAVAFYKxCuHxQ+bxA1ohGK4QnkDgSqEAwEIwYiE8hMDFQgGYhWDQQngOgeuFAmALwbiF8Cgi8/qB9zFyITyMwFVDAdiFYPBCeB6RqRsCfCEYvxAeSWTqhoBgCIYwhKcSmbohgBiCUQzhwUSmbgg4hmAgQ3g2kakbApQhGMsQHk9k6oaAZgiGM4QnFJm6IQAaghEN4SFFpm4ImIZgUEN4TpGpGwKsIRjXEJ5VZOqGAG0IxjaExxWZuiGgG4LhDeGJRaZuCACHYIRDeGiRqRsCxiEY5BCeW2TqhgBzCMY5hEcXmbohIB2CoQ7h6UWmbghgh2C0Q3iAkakbAt4hGPAQHmJk6oaAeQgGPYTnGJm6IcAegnEP6TkGrhtKwD0k4x7ScwxcN5SAe0jGPaTnGLhuKAH3kIx7SM8xcN1QAu4hGfeQnmPguqEE3EMy7iE9x8B1Qwm4h2TcQ3qOgeuGEnAPybiH9BwD1w0l4B6ScQ/pOQauG0rAPSTjHtJzDFw3lIB7SMY9pOcYK4USbwm4h2TcQ3qOARNvCbCHZNhDeowBgzcJqIdk1EN6igETbwmgh2TQQ3qIgbfpAOYhGfOQnmHgpfs
AeUiGPGTY7pBZvQ9cjyEPGXY8wMRbAuQhGfKQYdMDTLwlQB6SIQ8Z9j3AxFsC5CH5zoew9QEm3hLtfeCbHzzCwIm3RNsfZvsfZDbxlnAHBHM+jzAyy/LRJgi+C8IjDJx4S7QPgm+E8Agjs3UEbYXgeyHCZgjsf2g3BN8OEfZDZDaQAP/jOyI8wshsA0GbIviuCI8wMjtB0L4Ihjxk2BmBN4MA5iEZ85BhcwTeDwKYh2TMQ3qGgbeEAOQhGfKQquB+AHlIhjykRxg4cZYAeUiGPKRHGDhxlgB5SIY8pEcYuccH3I8hD+kRBk6cJUAekiEPGXZM4JkXIA/JkIcMmybwzAuQh2TIQ+rskioJiIdkxEOGnRM4cATEQzLiIcPmCRw4AuIhGfGQYf8E7n2AeEhGPGTYQoF7HyAekhEP6QkGTpwlIB6SEQ/pCQZOnCUgHpIRD+kJBk6cJSAekhEP6QkGTpwlIB6SEQ8ZdlRg9wXIQzLkIcOmCuy+gHlIxjxk2FeB/Q9AD8mghwxbK7D/AeohGfWQYXdFZh8g8D+GPWTYYIH9D2APybCH9BgDJ84SYA/JsIf0GAMnzhJgD8mwh/QYAyfOEmAPybCH9BgDJ84SYA/JsIcM+y2w/wHsIRn2kGHLBfY/gD0kwx4y7LrA/gewh2TYQ4aNF9j/APaQDHvIsPcC+x/AHpJhDxm2X+DEGWAPybCH9BgjkzgD7CEZ9pAeY2QSZ4A9JMMe0mOMTOIMsIdk2EN6jJFJnAH2kAx7SI8xMokzwB6SYQ8ZsAf2P4A9JMMe0nOMFVxwIwH3kIx7SM8xcOIMsIdk2EN6jIGDN0A9JKMe0lMMnDgD6CEZ9JABesDMATAPyZiH9AwDJy4AeUiGPGTYpYETF4A8JEMeMmzUwIkzQB6SIQ8Z9mrgxBkwD8mYhwzbNXDiDJiHZMxDhh0bOHEGzEMy5iHDpg2cOAPmIRnzkJ5hZBJnwDwkYx7SM4xM5gKYh2TMQ3qGkUmcAfOQjHlIzzAyiTNgHpIxDxn2cGD/A8xDMuYhwzYOnDgD5iEZ85CeYWQSZ8A8JGMe0jOMTOIMmIdkzEN6hpFJnAHzkIx5yHX+wwASMA/JmIf0DAMTawmYh2TMQ3qGgTMvgDwkQx7SI4zc5xiA+zHkIT3CyCTeAHlIhjykRxiZxBsgD8mQh1zndxFJgDwkQx7SI4xM4g2Qh2TIQ4WtHvjrDAB5KIY8VNjqgT/QAJCHYshD3WXrLgoQD8WIhwo7PfBnGgDxUIx4qLDTA3+pARAPxYiHCjs98McaAPFQjHiosNMD9l4FiIdixEN5goETbwWIh2LEQ3mCgRNvBYiHYsRDeYKBE28FiIdixEN5goETbwWIh2LEQ4WdHth9AfJQDHmosNMDuy9gHooxDxV2emD/A9BDMeihwk4P7H+AeihGPVTY6YH9D2APxbCHCjs9sP8B7KEY9lAeY+DEWwHsoRj2UB5j4MRbAeyhGPZQHmPgxFsB7KEY9lAeY+DEWwHsoRj2UGGnB/Y/gD0Uwx4q7PTA/gewh2LYQ4WdHtj/APZQDHuosNMD+x/AHophDxV2emD/A9hDMeyhwk4PmHgrgD0Uwx7KYwyceCuAPRTDHspjDJx4K4A9FMMeymMMnHgrgD0U/x6Uxxg48Vboi1D8k1Bhqwf2P/RRKP5VqLDVA/sf+i7U7MNQnvnCLxYq+Gko5n9hqwd8fOjjUPzrUB5j4M9poc9D8e9DeYoBE2+FvhDFPxEVoAfKPBT6RhT/SJRnGDDxUegrUfwzUR5h4MRboQ9FMeShPMPAibcCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahPMPAibcCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahPMPAmYsCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahPMPAibcCzEMx5qE8w8CJtwLMQzHmoTzDwIm3AsxDMeahwkYPPHUB5qEY81CeYeDEWwHmoRjzUCb
/SQMFmIdizEN5hoEzJ4A8FEMeyhTcDyAPxZCH8ggj8ylDgDwUQx7KI4zM1wwB8lAMeSiTX2mvAPJQDHkojzAy3zQEyEMx5KE8wsgkzgB5KIY8lEcYmcQZIA/FkIfyCCPzcUPgfYx4KE8wMokzIB6KEQ/lCUYmcQbEQzHioTzByCTOgHgoRjyUJxiZxBkQD8WIh/IEI5M4A+KhGPFQnmBkEmdAPBQjHsoTjEziDIiHYsRDeYKRSZwB8VCMeCiPMDKJM0AeiiEP5RlGJnEGzEMx5qE8xMgkzgB6KAY9lKcYmcQZUA/FqIfyGCOTOAPsoRj2UB5jZBJngD0Uwx7KY4xM4gywh2LYQ3mMkUmcAfZQDHsojzEyiTPAHophD+UxRiZxBthDMeyhPMbIJM4AeyiGPZTHGJnEGWAPxbCH8hgjkzgD7KEY9lAeY2QSZ4A9FMMeymOMTOIMsIdi2EN5jJFJnAH2UAx7KI8xMokzwB6KYQ/lMUYmcQbYQzHsoTzGyCTOAHsohj2UxxiZxBlgD8Wwh/IYI5M4A+yhGPZQAXtg/wPYQzHsodb5D7srwD0U4x7KcwycOAPsoRj2UOtC9Aewh2LYQwXsgYNvgD0Uwx7KY4wV3OKvAPZQDHsojzFWcIu/AthDMeyhPcZYrVDdQgPsoRn20B5jrOAWfw2wh2bYQ3uOsYZb/DXgHppxD+05xhpu8deAe2jGPbTnGGv4iQkNuIdm3EN7jrGGn5jQgHtoxj205xhrfEQG4B6acQ/tOcYafmJCA+6hGffQnmOsof9pwD004x7ac4w19D8NuIdm3EOHoyzgJyY04B6acQ/tOUbGfwH30Ix7aM8x1th/AffQjHvocKjF3R02ADggAx9aZAt/GnAPzbiHFtnCnwbYQzPsoUW28KcB9dCMemiRLfxpAD00gx5aZAt/GjAPzZiHFvnCnwbMQzPmoWW+8KcB89CMeWiZL/xpwDw0Yx5a5gt/GjAPzZiHlvnCnwbMQzPmoWW+8KcB89CMeWiZL/xpwDw0Yx5a5udeDZiHZsxDy3zhTwPmoRnz0DJf+NOAeWjGPLTMF/40YB6aMQ+t8oU/DZiHZsxDq3zhTwPmoRnz0Cpf+NOAeWjGPLTKF/40YB6aMQ+t8oU/DaCHZtBDq2zhTwPooRn00KrgfoB6aEY9tMoX/jTAHpphD63yhT8NuIdm3EOrfOFPA+6h+QkZOl/40+iMDH5Ihs4X/jQ6JoOfk6HzhT+NTsrgR2UE7nGHYz90WsbsuIzsiisNz8tg7hf2euBzK9CRGfzMjLDXAx9dgU7N4MdmhL0euPuigzP4yRlhrwfuvujsDH54hg7TL46d0fkZDHxoDzIyx6wA8KEZ+NAeZGROWgHgQzPwoT3IyBy2AsCHZuBDe5CROW8FgA/NwIcOmz0yR64AB2TgQ4fNHrgDAfKhGfnQYbMHdmBAPjQjHzps9sAODMiHZuRDh80e2IEB+dCMfGhPMnDpUQPyoRn50J5k4NKjBuRDM/KhPcnApUcNyIdm5EN7koFLjxqQD83Ih/YoA5ceNUAfmqEPHTZ7YP8D6EMz9KHDZo/MqT/A/xj60GGzB/Y/gD40Qx86bPbA/gfQh2boQ4fNHtj/APrQDH1ojzJw6VED9KEZ+tAeZeDSowboQzP0oT3KwKVHDdCHZuhDe5SBS48aoA/N0If2KAOXHjVAH5qhD+1RBi49aoA+NEMf2qMMXHrUAH1ohj50OJTjDn5mQgP2oRn70J5l4NwdoA/N0If2KAPHj4B8aEY+tCcZOHcH4EMz8KE9yMC5O+AemnEP7TkGzp0A9tAMe+iw2wPnTgB7aIY9dNjtgXN3gD00wx467PbAuTvAHpphDx12e+DcHWAPzbCHDrs9cO4OsIdm2EN7jJHJ3QH20Ax7aI8xMrk7wB6aYQ/tMUYmeQLYQzPsoT3GyOTuAHtohj10OL4D+x/AHpphDx12e2D/A9hDM+yhwyEeOHcH2EMz7KHDOR44dwfYQzP
socNRHpmj64D/Me6hw3YPHDsB7qEZ99CBe+DkE3APzbiHzp/poQH20Ax76HXB/QD20Ax7GI8xcO5uAPYwDHsYjzFw7m4A9jAMe5i7/GYjA7CHYdjDeIyBc3cDsIdh2MOE7R74LD6APQzDHiZs98DH8QHsYRj2MB5jwNdvAPUwjHqYsNsDH+gHqIdh1MOE3R74TD9APQyjHibs9sDH+gHqYRj1MGG3Bz47ElAPw6iH8RQDZ84GUA/DqIfxFANnzgZQD8Ooh/EQA2fOBkAPw6CH8RQDZ84GUA/DqIcJuz2w+wLsYRj2MGG3B3ZfwD0M4x4m7PbA/gfAh2Hgw4TdHtj/APkwjHyYsNsD+x8gH4aRDxN2e2D/A+TDMPJhPMnIHF8KyIdh5MN4kpE7wRT4HyMfxpOMzCGmgHwYRj6MJxmZc0wB+TCMfJiw2wP7HyAfhpEPE3Z7YP8D5MMw8mHCbg/sf4B8GEY+TNjtgf0PkA/DyIcJuz2w/wHyYRj5MGG3B8ycDSAfhpEP40kGzpwNIB+GkQ/jSQbOnA0gH4aRD+NJBs6cDSAfhpEP40kGzpwNIB+GkQ8TDvbA/gfQh2How3iUgTNnA9CHYejDqPCtA/idCQPYh2Hsw3iWATNnA9CHYejDhIM9YPQGyIdh5MN4kgEzZwPAh2Hgw3iQATNnA7iHYdzDeI4BMxcDsIdh2MOcsAeWB77HqIcJ2z1g5mwA9jAMe5iw3QNmzgZgD8OwhwnbPWDmbAD2MAx7mLDdA2bOBmAPw7CHCds9YOZsAPYwDHuYgD1g5mwA9TD82HBPMXDqYtDB4fzkcE8xcOZs0Nnh/PBwTzFw5mzQ8eH8/PCw3QP7HzpBnB8hHrZ7wMzZoEPEZ6eIm2zmbOA54sz/PMXAmbNBR4nzs8TDfg8cO6HTxPlx4ib/oQODDhTnJ4p7ioG/M2HQmeKMehib/cKfAdDDMOhhCgd7GAA9DIMexkOMTOYNoIdh0MN4iJHJvAH0MAx6GJtfdGAA9DAMephwsAcOHQH0MAx6mLDfA0/dAHoYBj1M2O+Bp24APQyDHsZmCy8GMA/DmIcJ2z1w5AmYh2HMw4TtHjjyBMzDMOZhwnYP3HsB8zCMeRjPMDKZN2AehjEP4xlGJvMGzMMw5mE8w8hk3oB5GMY8jGcYmcwbMA/DmIfxCCOTeQPkYRjyMGG7B3ZfwDwMYx4mbPfA7gugh2HQw4TtHtj/APUwjHqYsN0D+x/AHoZhDxO2e2D/A9zDMO5hPMfIZN6AexjGPYznGJnMG3APw7iH8Rwjk3kD7mEY9zCeY2Qyb8A9DOMexnOMTOYNuIdh3MOE7R7Y/wD3MIx7mLDdA/sf4B6GcQ8Ttntg/wPcwzDuYcJ2D+x/gHsYxj1M2O6B/Q9wD8O4h/EcI5N5A+5hGPcwnmNkMm/APQzjHsZzjEzmDbiHYdzDeI6RybwB9zCMexjPMTKZN+AehnEP4zlGJvMG3MMw7mE8yMhk3gB8GAY+TAAfd3DBvwHkwzDyYQvkwwLyYRn5sJ5kiDt7q9fvVusVa2DugZahD3uXPVzBAvJhGfmwd9nDFSwAH5aBD3uXPVzBAu5hGfewd9nDFSzAHpZhD3uXP1zBAu5hGfewd/nDFSzgHpZxD3uXP1zBAu5hGfewnmPg3NkC7mEZ97CeY+Dc2QLuYRn3sCKffFjAPSzjHtZzDJw7W8A9LOMeNuz2wO8fcA/LuIcNR3vg9w+4h2Xcw4r86GcB97CMe1jPMXDubQH3sIx72HC4xx3c7mMB+LAMfFgPMjKjDwAfloEPe9rygUcfQD4sIx/WkwysD8CHZeDDhi0fd3C/kgXkwzLyYT3JwMMXAB+WgQ8btnzcwf1KFpAPy8iHlfnxD4APy8CHlfnxD3APy7iHlYXxD3APy7iHlYXxD3APy7iHlYXxD3APy7iHlYXxD3APy7iHVYXxD3APy7iHVYXxD3APy7iHVYXxD3APy7iHDad7CLhd0wLwYRn4sB5kZAZ
QAD4sAx82HO+BHQiAD8vAh1WFARCAD8vAhw3gQ8A9BxaAD8vAh1XhK88SnQlvAfqwDH3Y8LWrsX4ORkAAPyyDH/b0uSvIbizAH5bhDxu+dyVgCGoBALEMgNjwwSthsQXADRkCsQGBCIcbAG7IGIg9MRA8jAMIYhkEsTq769wCBmIZA7GeacCPxVmAQCxDINYjDQvPJbcAgViGQKxHGjbjAcAHGQKxHmngc9EtQCCWIRDrkQY+3s0CBGIZArEm/5V7CxCIZQjEeqSBP9ZnAQKxDIFYjzQwwrIAgViGQOzpcHO4adgCBmIZA7HhdHO8ctkCCGIZBLEmu/7KAgZiGQOx4XRzgcMQAEEsgyA2HG+OAbIFFMQyCmLD+eYSz0QAg1iGQWw44FzikRyAEMtAiA0gRMLtTxaQEMtIiPVkw64QyLGAhFhGQmw44lzCbw9YgEIsQyHWhjXQ8OMDFrAQy1iIDSwER9OAhVjGQqy1hWgewBDLYIj1cCOjD7yQsRDr4QYOxgELsYyF2HDGOYxFAAqxDIVYjzZwKARIiGUkxDqRj6UBCbGMhNhwxDmOpQEJsYyEWKfysTQgIZaREOvJBub4FpAQy0iI9WQjE4sDEmIZCbFh9weOxQEJsYyE2HDeB47FAQmxjITYcN4HjsUBCbGMhNhw3kfGf4D7MRJiw/4P7D+AhFhGQqwnG5lQGpAQy0iIDTtAJA4jAQqxDIVYjzYyow9AIZahEBu2gGRGH8BCLGMhdpX99oYFKMQyFGJX2W9vWEBCLCMhNuwAkRbOHwCFWIZCbDjwA79/4H+MhNiwA0TiIBygEMtQiF3nxz9AQiwjIXZdGP8ACbGMhNh1YfwDJMQyEmLXhfEPkBDLSIhd57++YQEJsYyE2HX+6xsWkBDLSIgNX76SOAcCKMQyFGLXhQEQoBDLUIhdFwZAgEIsQyF2XRgAAQmxjIS4u/wA6AAJcYyEuLv8AOgACHEMhLi7MADCINoBFOIYCnF3Kl9LcACGOAZD3J3O1xIcwCGO4RDn+UamluAAEHEMiLgARBQM4x0gIo4REecJh3X4Ecyd0DEi4gIRgVGIA0TEMSLiPOHAk5ADRMQxIuICEcGTkANIxDEk4jziwPqAiDhGRJzI1qMdACKOARHnAQecQxzgIY7xEOf5BuzBDuAQx3CIEyY7BTiAQxzDIS6cdQ6nAAdwiGM4xAUcAqcAB2iIYzTEifxSVAdoiGM0xIn8UlQHYIhjMMTJ/FJUB2iIYzTEyfxSVAdgiGMwxMn8UlQHaIhjNMTJ/FJUB2CIYzDEyfxSVAdoiGM0xMnsB4gcoCGO0RAXaIiCJQgHcIhjOMQFHKJgCcIBHuIYD3GyMPoBHuIYD3Eyvw3TAR7iGA9xKr8L2AEe4hgPceELWNgBAA9xjIc4lV8N4wAPcYyHOI83cAnHARziGA5xKr8W3wEc4hgOcR5vZN4fwCGO4RBX+ASWAzjEMRziPN3APNoBGuIYDXGBhihYx3SAhjhGQ1ygIUrD+R/QEMdoiAs0RMEs1AEa4hgNcYGGKAgzHKAhjtEQF2gIzuMcoCGO0RB3oiE4igM0xDEa4gINUZDqO0BDHKMhLtAQ3A0ADnEMhzhdmIcBD3GMh7jClhAHeIhjPMSFLSG4GwAe4hgPcTp/+JEDPMQxHuLClpBMNwBAxDEg4sKekEw3AETEMSLiwqYQBZNBB5CIY0jEmVIuApiIY0zEBSaicDYEmIhjTMQFJpLpBoCJOMZEnMkvDXQAijgGRVyAIpmRAEARx6CIC1AkMxIAKOIYFHGmwIYdgCKOQREXoEjGjwAUcQyKuABFNM7nABRxDIo4W2DDDlARx6iIC1RE46AKUBHHqIgLVETjoApQEceoiLMmT6cdwCKOYREXsIiGYMcBLOIYFnE2jIcQ7DgARhwDI86GuBDPigCNOIZGnGcdQmdeI/BEBkdcOAgd5/WAjjhGR1ygIxrPaQCPOIZHXMAjmV8A+IhjfMQFPqLxkAoAiWO
AxJ0ACR4RASFxjJC4EyHBnREgEscQiQuIJDMgAUbiGCNxgZEY3BkBJHEMkrgASQzujICSOEZJXKAkBvclgEkcwyQuHIuOc3yASRzDJC4ci25wXwScxDFO4k7nouO+CDiJY5zEhU9lGfiNdwdAiWOgxAVQYnBfAqDEMVDiwseyMCpwAJU4hkrcKr9u0AFW4hgrcaevZeG+CFiJY6zErfILBx2AJY7BEhdgicF9GcASx2CJC9/LsrgvA1ziGC5x6/zSQQdwiWO4xAVcYnFXBrzEMV7iAi+xuCsDYOIYMHEBmFjclQExcYyYuBMxwV0RIBPHkIkLyMTirgiQiWPIxAVkYnFXBMzEMWbiAjOxuCsCaOIYNHEBmljcEwA1cYyarO6CI0JPXgFssmLYZBU2kMCi5QpgkxXDJquATRzsCSuATVYMm6wCNnHQk1cAm6wYNlkFbOKgJ68ANlkxbLIK2MRBT14BbLJi2GQVsImDnrwC2IT+7W+3N9uXT8P+ODx+8/I4fL758scfbzY/7fbHm9t/3txvw78peet1br78541yN1/+81+3N+t1+K9Y3YX/kVqE/xmPkg//Y3X4n/FL1/5/xu9uhf+x/vZ/3ZKV4Z/JPv+30eDN4+PD2+G4e47NWa3P5oznxzQ29DwcDpsPQ/LDVPTDVqcfdHdHv0zQL5P0yzT9MkO/zNIvO90+blBuMujpaffLYfu8fdrsY5NGR55sGl21ubG3l59fdr+8pI2JuDHV1tjL8eN+97p9iFsy53ZOP92Gn97e3O71uN29HBL77uJ2140+8bq9/zTsD9td8ltl5KSy8Ze+bn8efk0acZFDnPxgfXr7wp38WqqTG8j16e2PZ3gGf6DuYciLxu8VhOdF/zLuYQn/Q140rkTx/zPSwEbDP4nYbhv9eOvoBbn2xtDL0atzo841PtLDYXs4bl7S8UNHT5XGjTtB3UxSN6Nx485QN7PUzabHuqZu1tjvj8fNw0f/T/HjigwSck3Pa9XZJPNlG70D+nXjfqPTy7b0sk/eM5LbPr3jr69D+obW8RvSTc39tDkMb/unuJ3I6cl36C0ZGt0FvSXqBeOZ4eHlCPofR2MgvWR7R84vyfkn71wZegxtb/Lh4yZ1KhPPAuRU1FepR0lNQ/Yd9VWyQGsyV1BfdW3D7WgKe4Qy6ivq9O7XigYOGi/okcm1o4FjRQMHGUOeM36opMmYp2Gzn0+S67v48TT+rt3j8NPT7uHnxMmirtLYyvPr03BMPTUOIUbq2NbQy+G4f3s47pIZMu5m5EunB01DttBrcluawx3N4TR4jyf/nfyXwhRHowy9DBsClxZLj3yIiRxCkKalyWAEf+F/+hTYiBNpjBsLOxriQ4k1sbltTT1ujpukDRu30eZyj8PwehiGxONWka8IPb2KtqnxcXi/eXs6plGQjqOgtrDl1M79OFze884ehY3y9GppOBM0zkt5slyu1tTZKUJeT+HC6V/GzzkH1zNt/kDmPe8eh9S26LfKk00rS7Y5so36x/qObJNkEv0P9Y/xg1dtJh0e9lsfRyRPX8SxCfUsR0nCyH1aWh8+H4eXx+ExcZTotVKzNG9Rvxc0EdAgrBQFFYL+x1FQQfOIJevG/bannkt+SA/TiTYPf78ZB7DtkHTdtYhH57Yn8H44Pnycve/xvJ1zU42d5NxUYpWKOjBlc4IehaC4RVI2pyibU/RwNIULhsZd0xg3vt8+HYdkmF/Hv+uuLbYJrfzjbfO0Pf46e1QqinMopROU0gnqwJJSOkUpnaJoSJNXGYqGTGPQ9X6/ex5HkhENRXFXPNCptuhzbIn+kYdxKm6u0RP2u+cP2/dJM3FwrNti1LGZ7fPmwyyyjKcV3TatjG39/XX4kLQTTy26sePtd8+vj+lPc3EzjZ45NvOSWhPP7brdAw6f0mbWcTNt4/7YzHH7PvlVq7u4nbbBemznl+Gn16SdONRrTB7fv708zEZ8Hb0tR53HNQbXH4aXYb8Z23zYvbz
ffkgHu3Uc8bS9wA9DEg2sVTyutD31D8Nx85Q49lrHrbQ98w/D8adfD8P+0/Yhib3WSSbT9tzjtmYD3Toex+/axpUP45QwS2tU5OmUngtKzwVNhZLSc0XpuaKJWFNcYWgiHr9A12rQPKuJh/DGYsnUUDoDx7+sMRP/MBxPcZd/5G+zcmJcmKR5hfJOsaLgi0JaRXmnchQGGnpIlJOM39trNG3mBqvIDcZzwnvaSZ6VjnNJevk0+YsVxbmUdCma/BUlXeNJdaffRUGla/bzYA9zTB0HUPSEacIWFHFLikcUTdiKyhfakCUUBhrX/IjiCCN9UnHQQg+IOoWgYFtOiQB1CkXRqTZkjaBk1DW6+W734Slxx1WcppCvWdPo6745WLON+qAVja/Rt3bc7dLYVURv0arGpva7fyS/Mk54qCNZ0xZrjI2h32gij3eNA9Y2yU1EHEw7iqadafuR25fXtzSHlXHZg4YSR2GsozqSaxwvvMD9w+6QqsQ9fVyK1dfU/euwvz/ufh7ScMDET2LVNmmHRg8PH4fnTfpY45KnaYu9QmPesPQ1i/ihTs/StgUqvtXd2/H17YiaNnHTjU/y8NOw2Q/7zdvxY5LTR23J0/hALiAo95JU/JJrGl1oNtFUbdIELEyrHx6mf4oLPXHgqhp7x8HnB0n8G8etqtFvD0+7h01a8bDxAHD66ad3SXMTlVfEVNh2lO5RCUZSXUAS8JFrmqhp+lKGMkGq6Gkqdev1hCgo4qE+aWybm/79wIhW9LtOP+PkoZSbU3AlKVyQ9LMUgShN84gmUGtoMLKU+1o51SepcE8Pzom2/vrz8Ov9ftgcdi/blw/3qB4aTUar6SVMLktxj2mb7J6eUiIcdRBFzm5NWz9+enr+OGweh33SgeU6bvL0+Cn2ceQq9JQl4XBlKfClWpKhAoqxbT3l6el5+3Ic9u83aZowfp4oGlLafOrp6RnMbyrqeeTJNDQIqoJLqtUpIpfKUpxKLmTIhYxt679PT8+nrIVNuCKecNsGp9lIIOKo0DXyk+fN/udHxs3jZQGNjXy+z84yJuZrq7b3NjYY5hbUYoypLfUmt2rzr7Hp48fty89jT503Ho/tp/dOYbygCUVSGC+pJyhyFk10W9PyCaMnAk68mwrTlgYAS6GxE+3PBzyYeEahkIvSVhrEJQX8kjxdTYM40VpNUNpQ1mspj7M0A9sp2KQJ19Fjcau23gCWpIh4HYNtDMFO7YBhd1wqG3Wstgzn1Nx+l+YT46rZqK22kfrUFguOYvx+emQ0Q1NeJmnikoSVFQ2pmmiyJsxrKN22lJZaCgUsea+led2RAzga+1xj1WxM+NInEi+RWLUNW/MSflIGPllEUw3lg5L6lSRaoWgQ1pL+h0r0hrJ0SymxJRplqT9YmnMdPWhHEYJbNfrJLP9dxetnGtfzwPxeRS5C1JwiLOGmkJceBUFzRYBV06BjaNAxtuP1HDdp3dHo+D03dkpfI9ozWhPXLRor0LjYJOJqP/kxzeG0HknSgCbp3appMRvFxJoYlyYEYqiiYhvh3NnC2RIVY+In1xaR+euT8SL6rZZeurubeu/UaWj8bQwjd09PG5Zf3sW1LjGFkm2zUmgPFRRiLGsb84Hd6/Cy2abWxcVrN72ltok/tAeti5PVxpcUWpuVdOK42TWuO4GLzyLvprdK/Z8SK0nzgKRYQ5EraEn/Q0Vqo6cJgfyGBkdHg6OjocY14o9TiDavo8SzbuvrPrflCynndAqUVOKnvGp0dtb+vFUbV3/WbXXCfJAq4iB16qKNSdDrZr95Ho4sKROxW1gKB1wjxXnd7A8oRtJxp6ISMsW+goI7Scm2otFW0SStabg1NEmbVdvTi016+Pj2kqyD0TFjopSZwloxVSkoKFfUTxQNDHqqQNA0YBrnLm8XCt10vGCXHhHFcIJCCjkVTCjUUtR5NZVQDHVe0zjgBJt48BSzRiqrCxq3Bc1xkmYGRcGOoiBCU8ZuKIwyjTVKbxA
Yu3Tk+OQ0gpxGUBFBUmSnyGkURXaa8mxDU7NpTBu9Sel68pjAU9xLTk7vQtJPl5T3KD1FB/Q/VHE005IsAht2WkJLPcLSg3eiLZQMhu+Hw+vu5ZBO/nG1cXXX0a/SsSMa26ZZ4/RbT4+FclAq+4mpzkYvRlJdUeqpcjWtL6UnN7mancqv024BepYkYSirNJMXTguNyD+c7BjeKLFP/DFKUmiwEDRYCKr7SBrEFA0WimJGTVG1oVnVNNYaEpvmA1z0ZmnEEOSkgsJ9SaOYohFD0XSv6REb8lLTmHkHw8ZJaxZYx+yH3qWgYUPQm5PUdxT1HUV9R9O+F0N1DtOYGp6s2qVJVVwbI6cQNGgIqjxK8iBFg4ai7FjTRgNDtQvTmOGdDZq9vLiwQK4jpnSDSoaShjJFrqMoSdW0UtJQqm4aIxlv1fy9RdM4uYyYIkTKECX5siKXUZT2aFoeaWhkMI1B0Otmf9xung7H/bB5hoNYnM862eajJ8o8XxoXY6speacAgdxAkhtIcgNFVQFNfqmpzG8ovrATJyacZKc0g2YuR2951bj5aopk00whLmev2x7Jfni/Hw4JHFvFCznI7XTjpoJTezjDjpN+6u1imilpvJc0Bil6qopSDk1juaEJ0qxb7fqwPfC3LqOByTZmuPvhefdpAAtp4qUqjStZkWPH6T15npg8j9CHpBFR0QCkKJrQhJwMOZyZ6nSNa8hndcp4fbZtxACH4SXh9zG0pjFM0BgmqfQgaWRVE3mkLqWptm2IMJp1qymzJWJxLjUVAqdeGP5LiYqYzKXKp5zsJpIlp8W81KnVtOx1ikspMtD00vS0dJqYg6bZx0wrf0ndUqZi6fdbygEsTe2OhmdHP8aRhKMo0zV2mdNTm9WO4lWxq8ZMESIqEa89bOTnh93bPn2T8Y5M0bjkJcwriTHxClTyg+n9T6+dXvJqesnU68hdNQW9xkzvjerlFOxayhfseiqYTHClbYIMv2D78mGMJR7YWkoRb+VyjSuOS3NtzNsaOenhuN8+pDWcGJK6xiWeh7fX193+eLjfvD1ud/fzZTw2jgPWbeMbazRUXNJW43JT6yhDrdIS3vvxvfBJOsb2bt34LKnl13Hp9PH+YfPwcdZuXFBfN/ZKahfHE/EWJtcYT0xNftry/cQi3s7k1o09ldr7Zfjp/jBuzPuYthm7ZuOSzMOvh+OQ9P4YKFNnF1NnJ+AhKb5WU6I7DduU2Rrq46b13Xpj7rennXmzvT5xiKLaHPG4+ZAOtPHWOdcIkY7D8+u4bPxtn44F0ZhNcw2laYICN0mBm6TATdHko+nRaZpdDVV07AQHpt19FINbmhWdbOvhPkNOVp9Ez5HGWgrjBQVTkuZlScGUojBey2mCnlYjkblUibJUlrAT97ubpmMq1zZuBURlh5iN0qMW06OmwoekpEuRyYqmJE3xi6GwxVC0YicmQCtwXOPWpxmwiJe4u8YlQ2MjfrhMHTf+nISbdro1LrhFM2Nc5qe3Lqa3TqhbUpqhKIRW5K2aaK8hJzUU1bnGbX2zcoSMpjCK9CmcFBQPSwowJMXDiiJETVbq86ovckkq/lgKAy1FNY4SeCfbRqrj7nXOp0W85WrVuKvsuDtungDjid924/Kn0BSAJXGt3zWS6tke3jhJmDYYk5e4xg+TvL3g5DNeqm0bm5oz6/hnTiCXQk/X3G5qnE0SY+om5IBE+SRlmooSEkWDoyYkaGhMNI0T/qdhv33/68Pu5WWYzYY2eWRkFllDxXNJeaKiuVhRv9BUTjfUHUxjzPDLfvP6OjzOBrsY3zjb3dYsco9Rh2v8JMdnzrTjMgsN59a0Gff5OaVBUVWjfv/fbm9et6/D0/ZluPnyx7/961//F3gkpv4="; \ No newline at end of file +window.searchData = 
"eJzFnV2TI7expv9L67bPmPgu6k62dbzalWwfSz4bGwpHB9Vd08NRd7NNskejcPi/bxSKKAJZb+KjSO1eWZ6uwpusSgCJfBKof93sd78cbr788V83P29fHm6+1Lc3L5vn/ubLm9fN/tDvDze3N2/7p5svb553D29P/eF3p39/9+H4/HRze3P/tDkc+sPNlzc3/74NrdhzM/e7h/73T7v7n6eG3r+93B+3u5dzU9M1oNHbwZD+5RhZBHU+HnYvGYnhzxe0/rzZ//yw+yWnEC65QOWzv5ET+Pz81Nq2kN3U+Fcvxw/73ev2fpI4tfK76S/Zd2qEjF7qy+G4f7s/7vaF1r5IL8WGn02LTF/J84P5aXPo/75/KmkNl73t019wPDz8x/bwH9uXD/1+e+wfmnT7z8f+5cHflBWOrruScuLOjOrpmispPm8+3x0/bF9+3r483h13P/cvh5IB+JZr2lNvxrXV+8Nh89iXtc/XXUt599AXPT1cdE3Nvw8/pEr47XTlldR3r36gK0mfL7uS7jhelmSnq66k+s+3zdP2+Ot/bp+OZfHTxe/DxVey4dDvP23viy/7fNm1dI/7fvNclA1XXUn12D+/9vvN8W1f/MXppdfSH4bIovLpomtp7nZPxR4VLrqS5ld//ebuv7/+2/ff/OXPJeXN6/buU78/bMkM1qT3x6//86u/f/vD3e+/+v7ru7//7duS6EP/fvP2dLwbwoQ7GicsUv7uL3/8ulp2PmY3aW4Pv+83+37/1dvxQ0lye/jJX7sZr73S+90evt3db4pz0/bwdLrsSrrfjXPs16cg6w+7l2P/cvzm5fXt+L/6Xytn6BCi3Y93b4e7f/Z3X3U2Kfa582X/X8by/xBNHiitFPqstXndVjzxzev20idLdO8/bI4V8f9wWWu/JkptPk6e5oW/8unp+X/0m4cKJ3p6ev4wXbn4tz49Pf+lLvR6enq+RvRF9P2AeKh4r+OFF77Ztv556Ztd6fPKe/PTbn8sdprTRdfSfHg4DZtF4YeH5+nKa6kfDtvDcfNS/tXRhVfSHgaBmoHieorv++P9h++8j5aE/aXP4dJr6fvVwX+NS4XvapaP4x2nxUV7XJKoP/bHP9QNz4/Dj5+P0Bf99sf++Mcxvvqueg372B9PMdmVVrPUojpfeOyP1/aESbvubeBx9VILYkeseQixG17xWfiB/KvjcXP/4bkvj0T+8k18+W9jx+EUvjbac7ifbrumXS3GIAsWK/7hw1t5TRzL3p9uuET7u8qknr/4Kpk9YEPN+DwacNm47NuojO/8tSjCa1f84ZQPrpI8ni++hma9UwXha3jVD0PquWriGaWHy+dzzhLdihTPSZLmeRaqNTzg4fJrPN36B3vleXzfv9/3hw/1YcXpht8iptj3h9fdy6HChOm6Kykf+pci/Tpdcy3FXw/HvpwjDlddSfVYOWzBEesy5d3u6Q+bp+KcMFx3P153JeW3Cvrxdin7SBQ/9fvt+1//sHt56T3cLqmP19/H11+QNYth+F+/+STm6sO/XgeCTy3VAXBvTiv8Pms0g++cHg+9z4LtwDun+BHC7rPaxybQnVOqgtxn4YsAd9GOOvlrqrLxb6TZHPZmFXG0G8m1geyiFjNTE8HG6TmnysLrs2Rz6jSnx0Hrs1wrsM6pFWD1WXQhqM5ps2DjrNoMqLN6DJyO5BrBdE4tC6XPkouAdFYXw+hIsQ1EZ7XwCiXSagPQOa0yDD7L1oPgGkUGAs/lCgv6nFYe/p6lWPDboMQAsFikEfDm9P7X1//n7m9ff/X9X/78zZ//dPeHv/z5h6///ENO/ef+17t9vznsXoZJuZiUymm3guXZLHkxVC6P9Nn+0wyTrzPe5iAyUagEyGeJVnic12PBcRSfl6BxXqG+v7QgxbxmDhSfZRlIfJkum1BMdC+NeOpBMQns8pA4r1Lf3y55kxVgOOoMbVA4q5UBwpHgAhicVeVBcCTaDoFzmhgApx3+OkpZ8HsWXAR
9s7oVwDeSb4C9OdUc6D2rLYG8BdUKwJvoXwB3C5aU3/UCqFulWX7qC2BuQbkAchP1hRA3Z0ER4JJxmYG3S/V4UMvqtkPaoj21RrTGwa1gFssVAE9RkweyRO/irFQ1iKXCy8fLPIAlOkX4WlTiwSuRKkPXaq06Z6mErWXVDGilkhWQtUKvkL6ogat1KpUPsgKqFvXqHuAV580KkHoWvwCi5m3gAGos3QpPc4oYnMZL6RZomlVigGmk1QhLc2o8KCUpxCZImlVkAWmaSGyDozlFDEbPam1QNKdUBqJn1eUwlGZ9YhCaia+mP10HiabN1XHRs3VM0uoUiPzw6yvo23PF4drjeO1SxYfNcVOUOl20VONY83PafwdNjnGxJX5uF2ltD3/c3b/V+NkX28PD+dJLFL95hpPNTG77nI0TqrQgcZgpZbNTFTqH3dseZWiJ0HRZi1KSYNjvnn+/OfRWF7WGS38Kl16iF/yj5kkO1wcnaX+mVPlP3/xnleLj9v2FSt4ja3+gd8vLf93//OvXf6rS+/jaZ9YEVVp//WPdk3x9uPRJ/vXPdT/qNbfOqVL6/r/rlA6fLlX64Zv/rHt8x+37S5/f//7693+t0vql/+m1cSaLwos/9v3r930/X9qEP1wltEgaqwosJrsaa65Spdayq5IqW3mVyjYXX5V0P6L6q1TzY0sJVkmvpgorlb+kEKvKmmojrqzNJcCIcmv6q6gLk19EtKk0q0oRL/+BbNvSv6TN1Wilwq3QsqTKVGqloo3FWiXNfL1WKr2sZKtkAVdFkGq3Fm4VVXHtFhFtK98qaeYquFLhJUVcRXVYx0V0m0q5ioowHUoUmwq6SorFmq5UvLqsq1YXV3Zh0TwoKClm67tSwUVne5T1cRULlW4r/Cqp1tV+pTaUy78usaixIgzO0pcWhdXNK6We2FoadtVxPVMgBnTqasRSocYysbIqVylG1haNpQ1l3aae11B4VFbOVJGl4gsKyarUOTY6U79CZFZdUQaC0aaiirJ2U/+98I2Xq81It2oqOCsq8jVnRLa97KyozVaeEenm4rOSMqw/mw8jV9PLVaGlsksK0Yrq5Vo0YkS+HO0SWzIVaqkNC4rUKrTLdWozK5aXqlXYU+UT7QVr1cpV7+E6I2xT5drMhmXFayU7SvVrYLxfcP5IoxVsVVvWmubCtiqrGkz5LfVx7Q1vxLwA52JL2Do4YMU1coG11XBI/qojdrZGDqhfIxKsr5wDBjSXtjRZUO2KfEnd5bbwVXbIEFxodwUryskmpvzuOtr1rwIX5V1sRfUruO7TL1frpSYsL9grW8LU7FEDGsv2Srqwco+mORqK94p6uH6PKLaV8JU02So+kEC+5mjH1vLN08hN5XwlXVjRl2o2FfWV9Ip1fan24tI+lLWL8PufdrvHp3kHGv/5Kug9aqoKvJ8sasTusUordM8rssg9lmwG7nnNjwi3x3ofW2B7XqsGtcfSl4D2CksqDbiqLhdbJ6qtQXVBE0bTiWBTGF2hhufqmWTbFJ3X5bB6LNoasOcVGaQeCzYC9bxeHqfHsstgel6dQy6xbitILyhijJ4ItkH0vF4OoceiSwB6QRni80SzCZ4X1OBqJlFrWsbk1YrYPBauhuZ1mhiZI8E8MM+rZXF5LLYIlpe0MbBLZdtAeV6xEUqDee1SJF0zKuf9uxVHX21kzKDomUYdiI5FGjF0SZGD0EmUXTivpKTR4L8N8LGkmoHNsXDFhy0qlLiEIlG6ODaphsqzMCy7E6Sk09DfLnqDZXicdIUmdFxQ48FxItmOjQu6LDROZJuRcV4VAmPa5a+klYPFseQSVFxQLoPixID6U0vyuhkoHOsxSLhJpwyAieJy/Fu0peIdt6PfStWKZ71kFGyCvERvGeLN21ACvLPxuOKEkiZFFuZmlCHKbbSgWvZ6WhjHcIJtOKbCChbQziyAmaRWteLoWHE2SYVOTZxUdTpJhRYLWWdixfNJGtQq3WYZTq2xg4epcyP
KZ5ZUKZZSDRWnltTq1D7edkRaYUHlY73mvFqGo7H8cjRasoIBo6l4IxbNa0Iomi63G5BoQQsD0UStDYfm9VgYOksHtqDQgiYHQmlSsAmD5jUhBI31mhBoXqsIQGPdxfizsOIY9vb7K2oWk36Hv///aE3Z8mbrNY+7hYoJ5N3v/jlX2e/+eR3AGxqqw7uDLa1wd1JoRrsZNR7sTnLtWDej9xFC3UnrYxPSzehUAd1J9iKcW7KiSvyKmmwMflZsxrg5PRyFn8XaEG5JiYknUrnGMCKjycLbSbA5PZpR48DtJNaKbTNaBWg7SS5EthllFktMms24NqfGwNqzWCOqzWhlQe0kuAjT5lQxpD3rtSHanBJeNZ2V2vBsRqkMZyfRejRboceA2ZlYIamQUcpD2UloGZLN6jJAK5JsxLEZtbo9y2ftts9V5JRbMTCdFS+GwMXxPdeDmgHwNcbZHPxN269Ev5NAK/jNqrHY9xx7N+5Wy+tV95gWfJhVzOHfSXTBPuOiKpvijFUvjHTqMXAazDXtestrVve/C95oBQ4+d482GJxTyqDgs9wCEJzT5DHwWbIdAmcUMQJOuv9VdLL4d5JbBH9zqhXo9yzeAH4zmjnsO2kt2Adc0KxAwLH6BQA4b0fxHS+AvzWKxed9+cjXhoFj7YUQOKNfRMDp+Ltgf2+DOo+DOSua9/UWrak04bfSZYAUFG8EUiULeEScql+ao6rGxUT2kmxVAzpOVYvguKTDY+NUqB3P1CpXudRCgFy0IYOPiQEV8Lislk+CLNhvW6dZ94gXQOSSetWjvd68XIGPJ+kL4HHWAg4dR8Kt4Dijh7FxtExvgcY5HQYZn5UagXFGi8fFaWLyWqMRj4qT9GQbKM7oYUw8abVB4oxOGRFPmssBMckiRdj022+/myl+++13V4GmoZ0qZjoY0ohMp/YRMa1smwWkU+OQj1a2/hHh0Knlj5SGVrZaAz8nkRL7bNGskVquwEWO5/ZLRYW51mF0eG46u8IutYtnsbTxfFSSUeAQ5dR8KbrMtM0AyalpwCMrW87jx0mAp4+VOlwSfFJArLG2bYwWz03PyWJlyzmQODXPccRaDYgNz63PqGFtuzA+PrebLanMtFtEgpNENRGsUMNAcCa1eHzI4sBJpuqD9VkVjDIigTn7q2y7EbjR0bqGtzWNVhnfQ3TtyuNIhqWlrdehtKl5QNKq2+bA2Tn+KmzpybZe61tLn0wGik0SFRsiSxpcqibWaJtJq4FXGghkt/tkFWr9v/5NlGHW2UFnLKu2XR5dnRvH5KpWgQVVZwHIqSrbh1gq6VxLWs1BqKlxjkHVapSR01mqnjhlFDPAaVKq2GSYVyjjpVgrT5fqVUvvCrOk1vZLT655DGkCRbESz4kq1UpYKB23KrYF1muxEIjTLG4ILGnXCV5JBaeKoVT+Q8klPRbnpFqNK/NaeENElo5JWVSTajRGAPWgJpUpbu6r1alxhLoPjxcVeQhD5MoMpqyVXWJWbN2rUqh6eOWPjZe0ah7a4udVhimTUJ6lVOsx6CSSAeSksnUISqIl2EttFrYGi5zbnVORypZZCJKmWRb2dBZ5JMmWGfGobB0CjqnlGd+obLWIMyaFLM3IrNojeJHx+vOfroIySHNVRCOyrjHpQNVKOcyyUjG5RiWrc2xQG4a2Rc0Q3V6g9YzDWqpUiGnLOqextCh0vm6p0ubh4Q9vh+NuPn5Rrc3Dw324cqna/VO/2Vfq+WsvVny/GTrRFoSYVC++crEa5hEzqTyPqPL5Gne/TOErMDUAkdz8UKXz+1+/rxyoHvvjT78uGq1YzeqhYxK+dAwZUhh1PWBIZFzq/5NauQNMcv8PR67/kBdpPe8+9ZUPc7x4yfOMQ4K/PD1tnjczsfGfrxIKRE1VhQEnixprG2KV1g3heUW24iGWbN4Untf8iOogYr2PLRvD81o11RGx9CWbwyssqTTgqrpc0iZRbS3ALWjCYToRbCq9rVDDgf9Msq1sMK/
L1WLEoq3bqPKKTIVGLNi4aTyvl6/biGWXbRzPq3MLoli3dfN4QRHXeCSCbRvI83q5yo9YdMkm8oIyrAdJNJs2khfUYAovUWsqoc6rFZe3sXD10rZOE1eQIMF8YJpXy9aRxGKLNpaXtHEFQCqbLzDJKzTWmIB57NJt3TWjcN6fW7d2X20kzOD+mUZdVUos0rjFu6TI1aokUXUBDpc0Gvy1YSNwSTVTxxILV5SyVChxNIsoXRyLVNe4zMKuLKKu0qmSuUSloVdf5CflKpukwzVtGi+o8bU3iWT7xvGCLluRk8g2bx7Pq8I6HTqwXEnrXL2TVTzX7zR4aW2VEBZqGVcaa4USweUfsM7bkakgivUXbFov6pbriogFyzeuF22peOftm9crVSueffsG9qJ2vjKJ6C/bxJ63oVSvNJsXKkqWmhTZqqWMcrFwqcKCatnraeHaC04wX4BRocrWMc0Ui6VMNWrF0bOioKlCpyYKrCprqtBiK5tmYsWShwa1SjdZtvG8xg6+6mluRLnwqUqxlDipKH+q1al9vO2bzissqHys15xXy9VSsfzyzeclK5gaqlS8cQN6XhNWVqXJhIZN6AUtXG+VqLVtRM/rsVVYs+Rmy2b0giZXm0VTnE0b0vOasGIr1mvalJ7XKtZxxbrVpVzznFeMbl/7l6++mQv5f74Ouj03VYduR4ta0W2k0oxus4o8uo0k29FtVvMjRLeR3scmdJvVqkK3kfRF6LZsSaUBV9Vl49RYtRnd5jVxtBoLti3vy2rM3EslGyfdrC6LbiPR5nRpVpFDt5FgK7rN6hXQbSS7EN1m1VlgEek2o9u8IoNuY8FGdJvVy6LbSHQRus0rY3Qba7ah27waXoHEam3oNqtWRreRcD26rdJk0C0QLCzVs2p5dBuJVZ0CUNJi0Fci03YOeF6xFd3O57HWEwJqRt28/zaj2muNfDlUSzUqUW0k0opqC4osqo2j6BKqLWg0+GsLgiuo5lBtJLzgJO4KZTZplypfHIvUo1saduXRbUGnof9d9EYroGrcNdqgal4tA1VjyQVQNa/LQ9VYth2qZlUxVCVDwJW0srAzklxyMndBuQJ+xgbUn5WQ183BzkhvCews6VbAztSCC2BnyZaKd74AdtapVjz7BbCzpF2Anan+QtiZtaEIO+l4XQM7WxR52MkrN5/UXWFRtRlF+FmtxWAXRrAAP8uqPPykipdnlqph6Ez6glE0D0OpUhGGlrV4GErFyjC0Xq3SberOe6jRzcDPmWgF/KxRLKUeauBnpU7t4yyfAFGhWPkYrzmvVsDOSP4C2FmwgoOdiXgr7MxqYtiZLMdbYGdei4GdsVoj7Mzq8bCTpgObYGdek4WdJCnYBjuzmhh2RnptsDOrVYadke7iU7jnWaQIfn4G5PPzlbDn52mpVsE8P7cDz6n9VtqZ0WJR5yTWzDkzah8R5JyUPrYQzoxKDd6cRC9hmyUbaqSvp8hFn2e91qgzpwajzbNUE8ks6eBZNRVrm0szihzAnORaM4YZLQZdfk5ilfrRN6OUh5aT4DJimdHlkvafzyFCG6vMaWFQeZZqo5QZpRyinOSW8MmcJoSTn5NQ5Co6cG3wOQ4/ruL7RSA5SVbTyAo1jCJnUvlVckYnCyEnmUWbR7OqGO9Egm0sMqNV90niSbn8ReKFdjQCUToLXrqRtTiqZ/pQKxe9wviaIaJp63U4dGq+kYVmtTgQeo6rG3PzWbXaPtNAz7J6GRg6SS4goSVNLl0Xa14W0VQD0DRga8rzZxVr+9vyN1mGoOcO0URAczo8/jyLtbPPnCILPs+CzdQzoweRZ9LZr6GSg52T2BLSmdMsY86z9PINnhkLMsBzUl5AO/OKZdQZay/nnHkrSu+6nXDW6JWe9MVjXhPYjJWXUc2MeglppuPugq8Q12uzcJOzoZlslmypM+A3UsVoBUq3bSsr6bOoM9W+MONUCzmJ6LXG0CzuTDUvjKDqwWcq28wfanVrHGvZZtCiBTw
MJfKYhF6mnU19LPgicZVi1cNu3xpa0q55yFd7vmVIOgkvJ6RZfQaPRrKNbDSjBsFotCRvoKI5FYxEzzptPDSjxMLQNP14pZGIxaBJErKJgWbUIACdlJroZ0aliD4nxcXck6gbO4l/9XL8sN+9bu/pZLZ9Ofb795v7/vA7ek2WiCYpKJIiKrXZmjKaGc+ZcY7xGmxJbrquQRTjFo1pxbq1hswYb9GSZuZba8rHGAAXzWgCwrUm5Ohw0aJLaHGTgUvs+o3MoaF+2ZjWqL/alCT6L9vRtBCoNYIw26IVjQy31gwMdIvWLAO8tUZRGlE0p5X+VhuSouCyHW1ouNYMxImLtizhxtUGJRC5bEoTVG4yIo4m6+zIVfdWSycrvLJu02IvY0Qcjf1pt3t86mluP7IluaA+Dnvd7HHoM2/vi3AtfpqphYzcfvdUaf0Xp0vbxOaPLBO9JhdcI3SdN9gat6Y2LwtakRULItYqUzLhKjCjNVatMuGUqqx9FNHlOde6IDoGos2hcZURj/3LMORvdy9/2L2838KhERhzvu0+3HbBk+CCcyDcFJlXiVeG5cCWS2LyetOaLfotDMmE4siM1ji8zgguCEcWNEXgVfJ8+A30G2PvKgOKgTewY1nUXWVOJuQGhrTG23UmsME2sqAt0q4zwCc677anqvtt9TAGb7xgBC3E+8CCJcF+nSlcpI+MaArz6+S5QBvJN0XZnPw8XhzgCm/C8Nf6SPGhP9zvt69535qa/CK9POdR3khG1P9PhdrpuqUyr5v95rk/xiVUGbHk6gbJ+P34Ks2/vB1f344/sBPr7KL6t+WrOTNzNm76C3Ib/nFz27mNBP6adivofe1mxI/622+/+yZIIiPiv1+0CyrXZtWOqMRS5plOS+w62U10+QWif/3mk6gUfN1+4ittq8TmdTV5xfJxATWyf+z71+/7Hs4Yc9GHvn89jFcvlxxHiTrBx3DtBXL73T8rxcYrl0sNX9KuUnp6yn4OvSgEcHpWr+6b9EXZ0/GyVZK7cO0FcumBtXm51/5ls71IjpYTZ/VQKX+r4Ckor1SMrl4u+bn2eX5uf5ik7uJxe2AWSnO16Orlkm8vbaLJ9W2y0Tybyec2Z3LvSU0h09YXpZNSFmZw63O3+aztOExksrbJBdfI2s4bXPZdtcuytsiKBVnbKlMyWVtgxrIvHF+QQAU2LPzk8cLcJTCgKXdZJV6ZuwS2XP5R5Ityl5xFv4UhmdwlMmPZZ5OX5i6RBQu+tVWQp1vL8wbUfUxmcboUKC76wvGl6VJgxyWfPF6eLgWGLPsG8uJ0KbJgyUeRL8tSAiuWfyV5aZYSGTHLUrbLcVlJJLfgu8n5rOS4jsnFQ/EFV4mHZg0uO7z4wngIWLEkHqoxJRcPzc1Y9tmQS+KhuQ0LvyNSMMKnMistCNcyHatGjg2/5mpt4VeN+BCyFJOtwBJ03wXPoDYKxIZc9n2Vy6JAxqLfwpBcFAjMWHZO5uIoEFiw4JMsy0Oyuf6iL5dcHJLN7bjkUyYFc6bTQSpNia+/oLvmIsG56LJPqiyPBIEFS76xcmEkOLdi+UdXFkeCwIgFX2FZHBkC+QWfZamJDDleff7r1Xg1abKKV0dGNvJqqpbl1WWZPK+mYhW8uix5OO6395l46iw3XblU6vjra9VDPF3XIJN6XGYVcsX1R37lkZp82RqjYnXRIJddRxRWEA0y+bVCaZXQIMQH6LnQvEGgOvptiHtb5RtULxPLRrGl+LVFiI9UszFqg0QuGs3HoQ0iFRFnVazZIJkN7wqBXYtMJoTLB28NIsUwrSJAa5HjQ7H6dNzicCsbaOUl4gnurzzFP/0pO8GRpo7bzdP3/h3+jR4kkDY8v7AF+z6/PvXHxqa/iG7DDwrb384oMjaUWEWTCZmumzGh1JHbTGC7dc6CfCdvMoDtgxn9fI8syccen3PyJX7NljNQT87XM5QeWm5eTpWKE/MF3aStZ1zQGdr8v8rjmCwM8LNc/qUotds
93Q3nd1Q8v+TaZXJvXGFOqpSviyv0mLFbbV8ef6CHl0R6s4saKprhEgE3+MWWXybM7WwFCJxmFiJUy3IpBEY1m0nIic7fXW7MWz6jsyMfnsnbTomrm0pyoyKyojnpfnFgURtRXG5EZjStjSkuNyI30rLzesvZSpVmZEdhaAgeiy83hR2hkRVtZ4JVxTtc5vWqOde2bGsuRTjuUzncf+hxFfZZiVzZLsWNx5XJ3EwGMjdRXmt+rJ4WF8+GLZPg0rmvYcorzHTsVoG/txUoj251vzvAZ3Ju7IvkQmzy33MlyqWNXDOpRmidFX9KzofHqvPPKlQ3fyo9qHiK6ZXLpaqeI18RsfhBHnfHzVPND00uXPA7x/urfia5tE4s7kv/e795fe0fuKkj+nN9v3r/9sJuX6YNfhFdjK2PLWyETTOxLG3CQszD4sZ8csnVH9p5BG15cHQwXa2dMOeNkH5rIGVpw7M6/C7+Uz7VSNuc6NUP8ds5tZr8sandYQMcNjX6S1OL3ySz4tiW/7emVr799rvv6Q6osa3oL00tnral0KO5x0bTPy5p92/xvpmk0eEvbS0mgOfU1vBv7a38sHlEDf2weWxvy497wPvSPy6JF+5e+/046pab/oK5jdmLlBqe4YQwqOCtALdcagGej/Mm1FUq1tvQJH4d1d1DhUN9cbrsUiXYrYAUH73VaUXBmffS88euat283MI17Vtq1TVsoekXXr2Uza7Ue3t93e2Ph7vN28N2d7cF8xUUx7dd0ZLx4babMt13FVtCIORzOnFCqsIecO9VbHrd755fj3f3m/sPjRbN7ryKPfPq1ApTyiWqjVZ82h7iCLfChOmOq+j/0v90d+g3+/sPLTYkd11ix3HzWDNTnS5rVKIR0FgeMMMBo2z6x6bI6r9gEczYbPK3hsq7p6fdL99vn7dPm3KLX/irD9PV+DGlVuZ0//7y88vuF+qUnO7bdPUFusfdK/ngESt63L2CLx7VKNL3dlqC/DnOzo2y0V+y70ydGzt99Gpq6NNmv9389NQffnf6C2roH7c325eH/vPNl/8aPiTgO/aXN/Kdere+ub15v+2fHg43X/4YEoj3u+fTeTAPu/s3/5//OF323/1wBM9w8Xj171Y3tz+ubpV9Z635xz9ufww3+z/4fwhtnP/F3yhubn8Ut1a9k2uR3ChmN4rkRnlz+6O8Vet3a9UlN8rZjTK5Ud3c/qiQoprdqJIb9c3tjxrdqGc36uRGc3P7o7k13TvRqeRGM7vRJDfam9sf7a2W71bdOrnRzm60yY3u5vZHd6tX7+xKJze62Y0uubG7uf2xQzd2sxu75MY19zrWsxvXqQMM/rBGkmLuO4I4j/eeFZIVwH9SBxKDWwgx3KyETG+e+5BInUgMriEkvHnuRyJ1JDG4h1DDi11LcvPcl0TqTGJwEaHhA5v7k0gdSgxuIgy8ee5TInUqMbiKsPBpz/1KpI4lBncR7lapd5126c1z3xKpc4nBZUQHlef+JVIHk4PPCOhhcu5hMvUwOfiMhB4m5x4myRDlxygBlcEolXqYHHxGSjTeyLmHydTD5OAzUt1q9a5TqYfJuYfJ1MPk4DNS32r9rlPpgCXnHiZTD5ODz0gDb557mEw9TA4+I+2t6t4JS5723MNk6mFy8BnpoPLcw2TqYXLNTWBy7mAydTC1Yl1bzR1MpQ6mvIPBIVfNHUylDqa8g63RT1ZzB1NkHlTsw1ZgKkwdTA0uo1ZQee5gKnUwNbiMEvDmuYOp1MHU4DJKwpvnDqZSB1OOe81q7l8q9S81eIxSUHjuXyr1LzW4jIJ9Ss0dTKUOpgeXUbBP6bmD6dTB9OAyysKb5w6mUwfTg8so2Kf03MF06mDaR1odvHnuYJoEW97BoGtrEG+lDqYHl9HQO/XcwXTqYHpwGQ29U88dTKcOpgef0dA79dzDdOphevAZDT1Mzz1Mpx6mB5/R0MP03MN06mFm8BkNPczMPcykHmYGn9HQw8zcw0zqYWb
wGQ09zMw9zKQeZgaf0dDDzNzDTOphxsfz0MPM3MMMCel9TA89zICoPvUwM/iMgR5m5h5mUg8zg88Y6GFm7mEm9TAz+IyBHmbmHmZSDzODzxjoYWbuYSb1MDv4jIEeZuceZlMPs4PPGOhhdu5hNvUwO/iMgR5m5x5mUw+zg8+YDgVSdu5hNvUwO/iMgR5m5x5mUw+zg89Y6GF27mGWLBz9ylHApSNYO6YeZgefsRL+5rmH2dTD7OAzFnqYnXuYTT3MDj5joYfZuYfZ1MPc4DMWrrPd3MNc6mFOcEttN3cwlzqYk+xqe+5fLvUvp7gFt5u7l0vdy2luze3m3uVS73KGW3W7uXO51LmcZVfdbu5cjmQmHLvqdiA5kTqX69hVt5s7l0udy63ZVbebO5dLnatbsavubu5cXepcnWBX3d3cu7rUuzrJrrq7uXt1qXt1il2adHP/6lL/6jS76u7mDtalDtYZdtXdzT2sSz2ss+yqu5t7WJd6WOfYVXc397CO5L86duHcgRRY6mHdml04d3MP61IPW6/YhfN67mHr1MPWgl3Lrecetk49bD34jLW3Rr5Tq3RVtJ572Dr1sLViV93ruYetUw9ba245tp472Dp1sLVhXXs9d7B16mBry66613MHW6cOtnbsqns9d7B16mDrjn9TcwdbkyTrml11r0GelSZaV+yye/xbenv0b6f7BbvyHv9G7yf51pXk3vb4J3o7ybiuFLv+Hv9G7ydJ15Vml+Dj3+j9JO+6MuwqfPwbvZ+kXleWXYiPf6P3k+zryrFr8fFv9H6SgF117HJ8/Bu9n+RgV2t2RT7+jd5P3M/n7vGiXKBU/yzXL9h1uYDZfuJ+PoOPl+YCJfxpxt8n8fHqXKCcP036+zw+XqALlPaneX+fysdrdIEy/zT177P5eJkuUPKfZv99Qh+v1AXK/1MA4HP6eLEuEAKgDMCn9fF6XSAKQDCA8Jl9vGQXAAQIQgKET+7jVbsALEAQGCB8fh8v3AXAAYLwAOFT/HjtLgAREAQJCJ/lx8t3AaCAIFRA+EQ/XsELwAUEAQPC5/rxIl4ANCAIGxA+3Y/X8QLQAUHwgPAZf7yUFwAQCEIIhM/649W8AJBAEEogfOIfL+gF4ASCgALhc/94TS8AKhCEFQif/sfLegFogSC4QHgCgFf2AgADQYiB8BAAL+4FYAaCQAPhOYB1aH0vADYQhBsIxdJ0AcCBIORAeBpgO1iqAOCBIPRAeCBg1/h+4H4EIAjPBNzq1uh3nSAMFCAEQRiC8FjACRTlC0ARBMEIwpMBB3mkACBBEJIgPBxwsPBCAJYgCEwQng84WH8hAE4QhCcIjwicwfcj6k7cz1MCZ/H9wP0IVRAeFDiH7wf+R8CC8KzAYf8DaEEQtiA8LnDY/wBdEAQvCE8MuhW+H/gfIQzCQ4MOl/oAxiAIZBCeG3TY/wBmEIQzCI8OOux/gDQIghqEpwcd9j8AGwShDcIDhA77H+ANggAH4RlCZ+HwB5CDIMxBGH74A9BBEOogDFtSJAB2EIQ7CMMWFgkAHgQhD8Kw5UUCoAdB2IOwfI0RgA+C0AdhM2VGgD8IAiCEzVQaAQQhCIMQNlNsBCiEIBhC2Ey9EQARgpAIYTMlR4BFCAIjhM1VHaGyI+J7NlN4BIiEIEhC2EztEYASglAJYTPlR4BLCAImhOMrkARAE4KwCeH4IiQB8IQgfEI4vg5JAEQhCKMQnjvgpKgAmEIQTiFGUIEjP4AqBGEVwvMHnBoVAFcIwiuERxA45yYAsRAEWQhPIXCCVABoIQi1EI6vTBKAWwgCLoRji5MEIBeCoAvR8fVJAsALQeiF6PgSJQH4hSAAQ3R8lZIACEMQhiE6vlBJAIohCMYQHV+rJADIEIRkiI4vVxKAZQgCM0THVywJgDME4RmiY4uWBAAaghAN0fF1SwIwDUGghuj40iUBsIYgXEOs+eolAciGIGhDrPkCJgHghiB0Q6z5GiYB+IYggEOs+TImARC
HIIxDrPlKJgEwhyCcQ6z5YiYBSIcgqEOs+XomAWCHILRDrPmSJgF4hyDAQ6z5qiYBkIcgzEOs+cImAaiHINhDrvjaJgmwhyTYQ6748iYJsIck2EOu+AonCbiHJNxDrvgiJwm4hyTcQ674OicJuIck3EOu+FInCbiHJNxDrvhqJwm4hyTcQ674gicJuIck3EOu+JonCbiHJNxDrviyJwm4hyTcQwq+8kkC7iEJ95CCL36SgHtIwj2k4OufJOAeknAPKfgSKAm4hyTcQwq+CkoC7iEJ95CCL4SSgHtIwj2k4GuhJOAeknAPKfhyKAm4hyTcQwq+IkoC7iEJ95CCL4qSgHtIuv3Bc4wO5k0l2gBBd0BItjRKoi0Qsz0QbHWUhJsgiPdJtkBKom0QdB+EZGukJNoIQXdCSLZMSqKtEHQvhOQrpSTaDUG3Q0i+WEqiDRF0R4Tk66Uk2hNBN0VIvmRKon0RBHlIxVdNSYA8JEEeUvGFUxIgD0mQh1R87ZQEyEMS5CEVXz4lAfKQBHlIxVdQSYA8JEEeUvFFVBIgD0mQh1R8HZUEzEMS5iEVX0olAfOQhHlIxVdTScA8JGEeUvEFVRIwD0mYh9R8TZUEzEMS5iE1X1YlAfOQhHlIzzCYLUmAeUjCPKRnGHhXEkAekiAP6REG474AeUiCPKRHGHjhLwHykAR5SI8w8MJfAuQhCfKQOrMdDCAPSZCH9AgDL/wlQB6SIA/pEQZe+EuAPCRBHtIjDLzwlwB5SII8pEcY+PUD4iEJ8ZCeYOCFvwTEQxLiIT3BwAt/CYiHJMRDeoKBF/4SEA9JiIf0BAMv/CUgHpIQD+kRBl74S4A8JEEe0jMMvPCXgHlIwjykhxh44S8B9JAEekhPMfDCXwLqIQn1kB5j4IW/BNhDEuwhPcbAC38JsIck2EN6jIEX/hJgD0mwh/QYAy/8JcAekmAP6TEGs/AH2EMS7CE9xmAW/gB7SII9pMcYzMIfYA9JsIf0GINZ+APsIQn2kB5jMAt/gD0kwR7SYwxm4Q+whyTYQ3qMwSz8AfaQBHtIjzGYhT/AHpJgD+kxBrPwB9hDEuwhPcZgFv4Ae0iCPaTHGMzCH2APSbCH9BiDWfgD7CEJ9pAeYzALf4A9JMEe0mMMZuEPsIck2EN6jMEs/AH2kAR7SM8xmIU/4B6ScA/pOQaz8AfcQxLuIT3HYBb+gHtIwj2k5xjMwh9wD0m4h/Qcg1n4A+4hCfeQnmN0sGJEAu4hCfeQnmN0sGJEAu4hCfeQnmOsVzDxALiHJNxDepCBEw+Ae0jCPaTnGDjxALCHJNhDeoyBY19APSShHnLczgFXXgB6SAI9pIcYeOEHmIckzEN6hsEkHgDzkIR5SM8wmMQDYB6SMA/pGQaTeADMQxLmIcfNHTjxAJiHJMxDeobBJB4A85CEeUjPMJjEA2AekjAP6RkGk3gAzEMS5iE9w2BWboB5SMI8lGcYOPGgAPNQhHkozzBw4kEB5qEI81CeYeDEgwLMQxHmoTzDwIkHBZiHIsxDrfjzTxRgHoowD7Xij0BRgHkowjzUij8FRQHmoQjzUCt+5asA81CEeSjPMHDFggLMQxHmoTzDwIkLBZiHIsxDCfY8MAWQhyLIQ43nOuETVQDyUAR5KI8wuENVgPsR5KE8wmDOVQHIQxHkoTzCYF4fQB6KIA/lEQZzugpAHoogD+URBnPACkAeiiAP5REGc8YKQB6KIA/lEQbz+oH3EeKhPMFgTloBxEMR4qE8wWAOWwHEQxHioTzCwIkLBZCHIshDeYaBExcKMA9FmIfyEAMnLhSAHopAD+UpBk5cKEA9FKEeymMMnLhQAHsogj2Uxxg4caEA9lAEeyiPMXDiQgHsoQj2UB5j4MSFAthDEeyhPMbAiQsFsIei50F5jIETFwqdCEWPhPIYAycuFDoUip4K5TEGTlwodC7U7GAoxSYuFDwaivifxxg4caHQ6VD0eCiPMXDiQqEDougJUR5j4MSFQmd
E0UOiPMbAiQuFzomiB0V5jIETFwodFUXPivIYAycuFDotimAP5TEGTlwogD0UwR7KYwycuFAAeyiCPZTHGDhxoQD2UAR7KM8xcOJCAe6hCPdQnmPgxIUC3EMR7qE8x8CJCwW4hyLcQ3mOgRMXCnAPRbiH8hwDJy4U4B6KcA/lOQZOXCjAPRThHspzDJy4UIB7KMI9lOcYOHGhAPdQhHsoDzLWAiUOFAAfioAP5UEGTBwowD0U4R7KcwyYOFAAeyiCPdS40QPfDpyPUA/lKQZMHCgAPRSBHspDDLxwA8xDEeahPMPAiQMFmIcizEN5hoETBwowD0WYh/IMAycOFGAeijAP5RkGThwowDwUYR5q3OoBEwcKMA9FmIcat3rAxIECzEMR5qHGrR4wcaAA81CEeSjLI18FmIcizEONWz1w4gAwD0WYhxq3emD/A8xDEeahPMNgEgeAeSjCPJRnGEziADAPRZiH8gyDSRwA5qEI81CeYTCJA8A8FGEeatzqgadewDwUYR5q3OqBV56AeSjCPJTjj3NRgHkowjyUY090UQB5KII8lOMPdVEAeSiCPJTjz3VRAHkogjyU4492UQB5KII8lONPd1EAeSiCPJTjD3hRAHkogjxUxx/xogDyUAR5qI4/4kUB5KEI8lAde8SLAsRDEeKhOv6IFwWIhyLEQ3X8ES8KEA9FiIfq+CNeFCAeihAP1fFHvChAPBQhHqrjj3hRAHkogjxUxx/xogDzUIR5qI4/4kUB6KEI9FBr/ogXBaiHItRDrfkjXhTAHopgD7Xmj3hRAHsogj3Umj/iRQHsoQj2UGv+iBcFsIci2EOt+SNeFMAeimAPteaPeFEAeyiCPdSaP+JFAeyhCPZQa/6IFwWwhyLYQ635I14UwB6KYA+94o940QB7aII99Io/4kUD7KEJ9tAr/ogXDbCHJthDr/gjXjTAHppgD73ij3jRAHtogj30ij/iRQPsoQn20Cv+iBcNsIcm2EOv+CNeNMAemmAPveKPeNEAe2iCPfSKP+JFA+yhCfbQgj/iRQPuoQn30II/4kUD7qEJ99CCP+JFA+6hCffQgj/iRQPuoQn30II/4kUD7qEJ99CCx74acA9NuIcW7BkHGmAPTbCHHj9wAaNPDbCHJthDC77cWQPuoQn30J5jrOERFxpwD024h/YcYw2PuNCAe2jCPbTnGGuN8i4acA9NuIf2HGMNj7jQgHtowj205xhreMSKBtxDE+6hPcdYwyNWNOAemnAPPe72gAUzGnAPTbiH9hxjDQtmNOAemnAPPW73WMEzVjQAH5qADz3u91jBQ1Y0IB+akA89bvhYYRcE6EMT9KHHHR8r7IOAfWjCPvS45WOFv7ME4Icm8EN7mME4MYAfmsAPPe75WGEvBvRDE/qhx00fK3hSiwb4QxP8oceDruAoBuiHJvRDe5oB05cawA9N4If2MAOuoDVgH5qwDz1u+UD5Gw3QhyboQ3uUAdNHGpAPTT+UoVds+lKjT2XQb2V4koHTlxp9LYN+LsOTDJy+1OiDGfSLGZ5kMBMg+mbG7KMZmk1favjZDOJ6nmTg9KVGX86gn87wJAOnLzX6eAb9eobOzMDo+xn0AxqeZOD0pUaf0KDf0PAkA6cvNfqKBiEf2pMMnL7UgHxoQj60Jxk4fakB+dCEfGiPMpgIBKAPTdCH9iwDpy81YB+asA/tYQZOX2oAPzSBH3qEHzB/pgH90IR+aI8zcPpSA/yhCf7Qhj1qQwP6oQn90IY/aEgD+qEJ/dDjMVd48AX0QxP6oT3NwOlLDeiHJvRDW36/mwb0QxP6oT3NwOlLDeiHJvRDe5qB05ca0A9N6If2NAOnLzWgH5rQDz3SjxWOIAH+0AR/aI8zsP8A+qEJ/dCeZuD8pwb0QxP6oT3NwPlPDeiHJvRDe5rBfGgK0A9N6If2NIP51hSgH5rQD+3G4A9H4AB/aII/tOPPetEAf2iCP7Tjz3rRAH9ogj+048960YB/aMI/tOP
PetGAf2jCP7Tjz3rRgH9owj+048960YB/aMI/tOPPetGAf2jCP7Tjz3rRgH9owj90x5/1ogH/0IR/6I4/60UD/qEJ/9Adf9aLBgBEEwCiO/6sFw0AiCYARHf8WS8aABBNAIju+LNeNAAgmgAQ3fFnvWgAQDQBILrjz3rRAIBoAkB0x5/1ogEA0QSA6I4/60UDAKIJANFr/qwXDQCIJgBEr/mzXjQAIJoAEL3mz3rRAIBoAkD0mj/rRQMAogkA0Wv+rBcNAIgmAESv+bNeNAAgmgAQvebPetEAgGgCQPSaP+tFAwCiCQDRa/6sFw0AiCYARK/5s140ACCaABAz7vtYreHHpgEBMYSAmBV72IsBAMQQAGJW7GEvBvAPQ/iHWbGHvRiAPwzBH2bFHvZiAP0whH6YFXvYiwHwwxD4YVb8YS8GwA9D4IdZ8Ye9GAA/DIEfZsUf9mIA/DAEfpgVf9iLAfDDEPhhBH/YiwHwwxD4YQR/2IsB8MMQ+GEEf9iLAfDDEPhhBH/YiwHwwxD4YQR/2IsB8MMQ+GEEf9iLAfDDEPhhBH/YiwH0wxD6YQR/2IsB9MMQ+mEy9MMA+mEI/TCCP+zFAPphCP0wkj/sxQD6YQj9MJJf/BpAPwyhH0byh70YQD8MoR9Gsoe9GAA/DIEfRvKVfwbAD0Pgh5H8YS8GwA9D4IeR/GEvBsAPQ+CHyXz72wD2YQj7MJI/7MUA9GEI+jCSP+zFAPJhCPkwij/sxQDwYQj4MIo97MUA7GEI9jCKP+zFAOxhCPYwij/sxQDqYQj1MIo/7MUA6GEI9DCKP+zFAOphCPUwij/sxQDsYQj2MIo/7MUA7mEI9zCKP+zFAPBhCPgwij/sxQDyYQj5MJo/7MUA8mEI+TCaP+zFAPJhCPkwmj/sxQDyYQj5MJo/7MUA8mEI+TCaP+zFAPJhCPkwmj/sxQDyYQj5MJo/7MUA8mEI+TCaP+zFAPJhCPkwmj/sxQDyYQj5MJo/7MUA8mHoB8QNf9iLQZ8Qp98QN/xhLwZ9RZx+Rtzwh70Y9CFx+iVxwx/2YtC3xOnHxA1/2ItBnxOffU+cP+zFwC+KE/8z/GEvBn1UnH5V3PCHvRj0XXH6YXHDH/Zi0KfF6bfFDX/Yi0FfFyfow1j+sBcD0Ich6MNY/rAXA9CHIejDWP6wFwPQhyHow1j+sBcD0Ich6MOMGz8EPG3FAPZhCPswlq06MIB8GEI+jGWrDgwgH4aQD2PZqgMDwIch4MNYturAAO5hCPcwlq06MAB7GII9jOOrDgzAHoZgD+P4qgMDqIch1MM4vurAAOphCPUwjq86MIB6GEI9jOOrDgygHoZQD+P4qgMDqIch1MM4vurAAOphCPUwjq86MIB6GEI9jOOrDgygHoZQD+P4qgMDqIch1MN0fNWBAdTDEOphOr7qwADqYQj1MB1fdWAA9TCEepiOrzowgHoYQj1Mx1cdGEA9DKEepuOrDgygHoZQD+MpBj5txQDqYQj1MOMXy/HcDaiHIdTDdOxxGwZAD0Ogh+n4w4YMgB6GQA8zHnWFB28APQyBHmb8eDkOHQD0MAR6mDW/580A6GEI9DDjBz5w6AGghyHQw4wf+MChL4AehkAP4yEGkzkA0MMQ6GHWbNGBAczDEOZh1nzRgQHMwxDmYdZ80YEBzMMQ5mHWfNGBAczDEOZhV3zRgQXIwxLkYcfPmsPeZwHzsIR5WA8xcObAAuhhCfSw43fNoftbQD0soR52/L4HdF8LsIcl2MN6joEzBxZwD0u4h/UcA2cOLOAelnAP6zkGzhxYwD0s4R7WcwycObCAe1jCPaznGDhzYAH3sIR72PG75tj/APewhHvY8bvm2P8A97CEe9jxu+bY/wD3sIR72PG75tj/APewhHvY8fse2P8A97CEe1jPMXDmwALuYQn3sJ5j4MyBBdzDEu5hPcfAmQMLuIcl3MN6joEzBxZwD0u4h/UcA2cOLOAelnAPO37XHPsf4B6WcA87ftc
c+x/gHpZwDzt+1xxmDizgHpZwDzt+1xz7HwAfloAP60EGzhxYAD4sAR/WgwycObAAfFgCPqwHGThzYAH4sAR8WA8ycObAAvBhCfiwHmTgzIEF4MMS8GHHPR8CbhqxgHxYQj6s4uM/C8iHJeTDjls+hLw1q3dy3ZEGgAcS9mEVW3VgAfqwBH1YxVYdWEA+LCEfVrFVBxaAD0vAh1Vs1YEF3MMS7mEVX3VgAfewhHtYxVcdWMA9LOEeVvFVBxZwD0u4h/UcA6/9LeAelnAPO+74gGt/C7iHJdzDav6oSQu4hyXcw447PuDa3wLuYQn3sOOOD/z+AfewhHtYzzHw2t8C7mEJ97A6M/oB7mEJ97DjNz5g7sAC7mEJ97Djjg8BN4xZAD4sAR923PIh4IYxC8iHJeTDjns+hEGZUwvQhyXow3qUIQTct2gB+7CEfdhx1wcegQD7sIR9WM8yuAEUwA9L4IflT7yygH1Ywj6sGXPPsGzeAvhhCfyw47fNsT7wQcI+rGcZQsCycQvghyXww3qYgYdwwD4sYR/WswzchQH6sAR92PHMK9yFAfqwBH3Y8cwrPIQD9GEJ+rDjmVd4CAfowxL0YTNnXlmAPixBHzZz5pUF6MMS9GEzZ15ZQD4sIR82c+aVBejDEvRhxzOvBNz3awH8sAR+2PH75tiBAP2whH7YkX5gBwL4wxL8YcdDr/AYDviHJfzDjvxDwn3LFgAQSwCIHQGIFLdGv+sE+QWAgFhCQOyJgOARECAQSxCIHRGIVHASAAzEEgZiRwYi8TQEIIglEMSOEETiaQhQEEsoiB0piIT7ji3AIJZgEDtiEImHccBBLOEg1nMNiBAtwCCWYBDrsQbz+4EXEgpix8994PsBBbGEgtjxcx/wO5kWUBBLKIj1VKPDjx9QEEsoiPVUA3+n0wIKYgkFsZ5q4M9tWEBBLKEg1lMNfOqmBRTEEgpiPdXAFNECCmIJBbGd4/fNW4BBLMEgtuv44ncLQIglIMR6sAET6RZwEEs4iF2PwyAOQwAIsQSE2LXgGb4FJMQSEmLHz35IPBMBFGIJCrHjdz8UHskBC7GEhdjxwx8KJxMADLEEhlgPN5xALM0CGGIJDLHjlz+YaB7gEEtwiB0//aHgERgW8BBLeIj1fIOJ5gEPsYSH2PHbH0w0D4CIJUDEecABo3kHeIgjPMR5vgHNdwCHOIJD3PjpDxSLOEBDHKEhztMNGAo5AEMcgSFupdlY2gEY4ggMcR5u4FjaARjiCAxx4yYQGEs7AEMcgSFu3AQCSykcgCGOwBA3bgKBsbgDMMQRGOLGTSAwFncAhjgCQ9y4CQTG4g7AEEdgiBs3gcBY3AEY4ggMceMmEOw/AIY4AkPc+OUP7D8AhjgCQ9z45Q8YSjsAQxyBIe50AhZMhzhAQxyhIU7wJ6A6QEMcoSFuPAMLjz4O4BBHcIjzeAOPPoCGOEJDnKcbjPnA/wgMcXI8/1mj+cMBGuIIDXHjLhD4/gEMcQSGOA83hILTjwM0xBEa4iQ//gEY4ggMcTIz/gEY4ggMcTIz/gEY4ggMcTIz/gEY4ggMceMRWHj8AjDEERjixhOw8PgFYIgjMMSdvngOc3kOwBBHYIhTmQEQwBBHYIhTmQEQsBBHWIhTmQEQwBBHYIhTmQEQ0BBHaIhTmQEQ4BBHcIgbP3mu4CLUASDiCBBxIxDBuQQHiIgjRMSNRATnEhxAIo4gEXdCIjCX4AATcYSJuJGJKLiQcACKOAJFnIccbgUfAYAijkARp/lT8B2AIo5AETd+9xyP4gCKOAJF3AhFmEkIUBFHqIjzlANPQgCKOAJFnGbz0Q4wEUeYiBuZCOxCAIk4gkScJxy4BwMg4ggQcSMQwT0Y8BBHeIg78RA4hAMc4ggOcSMOwVMAoCGO0BBn+H3ADtAQR2iIM/w+YAdgiCMwxBl+H7ADNMQRGuIMvw/YARjiCAxxht8H7AANcYSGOMPvA3YAhjgCQ5zh9wE7QEMcoSHOsNW
oDtAQR2iIG2mIgjkMB3CIIzjEjThEwxyGAzzEER7iMt8AcYCHOMJD3MhDsAMBHuIID3EjD8EOBHiIIzzEWR4JO8BDHOEhzvMNXNDjAA9xhIe4cSsITOE4gEMcwSHO8iUJDuAQR3CI83iDe3/AAwkOcZavh3YAhziCQ5zjtyI5QEMcoSFupCEaZmIdoCGO0BA30hAt4fwPaIgjNMSNNETjVSigIY7QEHfaEQJPYXWAhjhCQ9xIQ5h1HKAhjtAQd6IhOIQANMQRGuJGGqLhObAO0BBHaIjzeIPpBgCHOIJDnMvMw4CHOMJDXMefx+EAD3GEh7iOr4xxgIc4wkPc+Plz3A0AD3GEh7hOZboBACKOABHX6Uw3AETEESLiPOEQGi8GARJxBIm4LrcWAUzEESbiRiai8WoIMBFHmIgbmQjTDQATcYSJOA85cHWjA1DEESjiRijCjAQAijgCRdwIRZiRAEARR6CIW2fYsANQxBEo4kYowvgRgCKOQBE3QhGN13MAijgCRdw6w4YdoCKOUBF3+h46DqoAFXGEirjTB9FxUAWoiCNUxI1fRMd02gEs4ggWcSMWMZAsOYBFHMEi3Xg2loFgpwNgpCNgpBs/im7grNgBNNIRNNKtxkIZ2Bc6AEc6Akc6Tzvwur4DdKQjdKQb6YiBc1oH8EhH8Eg34hHuF8wdsSN8pBv5iIFDagcASUcASTcCEgNHxA4Qko4Qku5ESGBn7AAi6Qgi6U7nZMEBqQOMpCOMpBsZiYGdsQOQpCOQpBshiYWdsQOUpCOUpDsdlQX7UgcwSUcwSTeelQXX+B3AJB3BJN14VpbFfRFwko5wkm7kJBb3RcBJOsJJOjHuGYb1Oh0AJR0BJd0ISizuSwCUdASUdJ58MKigA6ikI6ikE3zdYAdYSUdYSTeyEov7ImAlHWElneQLBzsASzoCS7oRlljclwEs6Qgs6eRYOYj7MsAlHcElXebQrA7gko7gkm7EJRZ3ZcBLOsJLupGXONyVATDpCDDpRmDicFcGxKQjxKQbiYnDXREgk44gk25EJg53RYBMOoJMuhMywV0RMJOOMJNuZCYOd0UATToCTboRmjjcEwA16Qg16caPhjjsyQCbdASbdONHQ2DSsgPYpCPYpBuxicM9AWCTjmCTbsQmDnsywCYdwSbdiE067MkAm3QEm3QjNumwJwNs0hFs0o3YpMOeDLBJ+Ld/3N5sXz71+2P/8M3LQ//55ssff7zZ/LTbH29u/3Vztx3/Talbr3Pz5b9uVHfz5b/+fXsjVur0H93pX6SR438osT79R3e6Zjit3f/HcPba+B9r/y//vg1mjv8cDPR/GyzePDzcvx2Ou+fYnrU52zN8Samyoef+cNg89skv09EvW4dfpsMvW4dfdvodSq7CL9Phl3Xhl6nwy7o6g56edr8cts/bp80+Nmnw5MmmwVerG3t7+fll98tL2tg6bszWNfZy/LDfvW7v45aiR27G+4Yq1Kbmdq/H7e7lkNgnxLndYXddVYOv27tP/f6w3SW/VcpzU1LVtvRz/2vSSPTwlRhvW9vgD6d/kNoETw8u707XDN+lGN1AmuAPJ58ZNv+N/2FO7dj1yXmGUpTxP3SlN79uP4nYbhu9Hxd0ulV9Y+jlGBs5z6rSeQ6H7eG4eUkHkLjDrkI3Oz0gER6QNDp0MxG6mQndLDxWpcNjrXSW43Fz/8H/U/y43NkgoYJDj++3oUniy5ED2uA8NoyKdr0OL9uFl13ppZPe8dfXPn1DLn5DdUPqT5tD/7Z/ituJWgm2nawW5vQPUp7+RYbxXoVRUauT9w6HWo//YW3w+fDrw8VDzeXpMQTn17rK7vsPm9SpIu/UIjhV6Kvr0FdN6KvhBwyfHh7tDh3FTB6wqusxgynkEcpooA0/eu3CwBGenbZh4AgzinPBw4MxMjzNdZ073j/1mz2YJOPHUzke3u8e+p+edvc/J04WdZXKVp5fn/pj6ql6FfmYrXzMu5fDcf92f9wlM2T
0w8KwGV5/8ERhbHDb4AedC257ehk6DLo6vP7hTPeT/05zXN1kfr97OZIhxsXPLQx6Q832aHHoIa5NgYw4Uce1prIbjQ3RocR28YhYN5I8bI6bpI113Ebd73ro+9dD3yce10VvWJhpdK6bGh/695u3p+RVDIFxFAXVTRyndu6G4fKOdvYoIJanCSkMi8KdurYMobIKAaUKs5gOY58OzmjCv5i2n3n3vHvoU9uikDbEICGQFU4H20KIHmZhFQZDHUZqE8J409WNHg/94X6/9XFE8vRlHOOFrupCV3VdXWDRfz72Lw/9Q+Io0WsNzYYBIKxKZHgd4SGoEGZoFVYlYS1jwjrFhmjeBvdzYWxw4bE4XRebv98MA9i2T7ru2sWjc13Pfd8f7z/M3vfwzahzU65uiD43lVilEqvClBpmsXWYxcLgqsKzVWFw1SFuMOH12lWlRdunY58M8+v4d8nKZ+1b+efb5ml7/HX2qFTcYnCUVei36+AoIeRRoWur0Ld1cC8TXMeu6rrr+/3ueRhJhoKTaI6IB7pad9rvnsM/0jAujmp1Xbcamnvcvk+aiacCXTcV+GZ2u8enHqyyu6hB2VU6+373vH3ePM5C1XiO0XVzzNDWx9f+MbFpFbdTNyUM7bw+JM+qi2d5UxfW+GZeUmtk3Exlj9nvng+f0mZU3Ey9Zx6379NfpeN26kb/oZ1f+p9ek3ZM3E7li397uZ9NISYyqAurjG5V12Me+5d+vxnavN+9vN8+piFUF4dQdX3msU/Ci/U6Hqiqm9g8kXF8FTdT1+0e++NPvx76/aftfRpur0TcWF03iRsD04yMW6zrMI/DNDNbKqn4eYVc2yqEKOsQooQZXIUoRoUZXIcwxoQQy67qnHQwaLZSGr7ZHc0LdZ14aimNyFc6bqquIz/2x1M055/6Gx0+46VTyDwJEdIoYUUhQ+ZJhQWkComV4VuJp+e1Ds+rrjM+9seZJ6xjRxDVjjCPNnTso2HhJsJSLmTdZFgYKxnm5bC21yHrZqb1VfWQcLKHuKaOf1uIe0KST4T1vLQhPyWDj4bYSIf1vAkpCVuZOHvsj3Hckj6p2EFDvBJSCCJkEaUNGQ4ZXCHENNqF5UUI62xltmic1JNBPYoypJxyjZUdkIsRhnOQzgNxZdQxtgbzynGSoHJeHls77nbpwCej7mddZVP73T+TPrOKI6AQGleGQkNjMD0bL39WdWP8Nlk/DcVp56xMGFXduu5Hbl9e39J1doxTXHANF0YfF/ptt6obNbzA3f3ukKro6He7dd0zPDd199rv7467n/s0wrBxhCHquuzY6OH+Q/+8SS2ME6pdnS+PjXnD0tcsTex/07OsG+h8q7u34+vbETWt46Yrn+Thp36z7/ebt+OHJO8QmRlmoDABiWC2DAO1ColaFRICOizLTXAcU9k/tofpn+KFQhxS67qZeHvwS46kmTikrqQz28PT7n6TZmWi8Wiax06Dc+gW6ymDNAGnsCTVE3kK94ZVuRJhkeomFjUllyZ6EvKacvqPME11E6aq/W10QRavFSqJxscDQXfRwzk9i5Obh/AwBDcyrMll8C01/coQDJqQbzMBztpAjWxY27uwiHDhkbjKBfPP/a93+35z2L1sXx7vQOLXRq86hAXCTe9tyvPVyT09pVn9KGZSE0fs6gaDp6fnD/3mod+nSZ9obgrPKbAqETi3DGGHElPCMETjIewwIRoy6zo3eHp63r4c+/37DVm9yDiv7erGz6enZzBJquR5nX5UQFCim4alEDAFJqC6EDvbkPIJQZWpnHKenp5PiykybcdIXtRNtbPxZChojibBOl963ux/fiAVAnFpR2Ujn+/YuSrG0J2oe3FDg+MMhVqM0bELCK8TddHO0PTxw/bl56GrzhuPU0khWAk+ElYdMqw+ZRimlZm4UfCIkEQ3IcFvJ6IYVrEu5JhdSJq6yiDX/4j5g4nzV2EIC/NGGO9lmIhlGIhUGAB1mIlNmInNVDcw5W7DqOv
CiOrC8OvCeNBVrgBRyB93TlvZq07tgHF3qAqOnK9uxj81F+DCqVnv37QyJF72nH58WF6JMOXKsLxSIZ+hQspYB7cxwYtNZWR4snG/S9dhQxFz9Hvrpu9TW2SxFLlSmBbD4jcM7jJEDDL8aBVcW4foxISgxISgzk558tCfXOgrLnQsFwZfF+iLq6wsGhbK6ROJC1Zk3dA6z3TFgWPoqyE0C7OBDC4gQ75Khc6igw+Yaa4PoZmd8lWhh7rQ+1zoqs6sQs86aXWy0pdneYN1nDdoeR40yFPRM5kYeyBiIixoZTBdhRIXFUqmtJ1SdmHcqVzBeHuOmzRra2OgKypHDp9c25OsZky8bINFsyydSBYKp3cZwqjw62UIImRI26nQMXQIrHRAUyaMISaMIbYyaDxbOKsYsip+cnU/2F+f9JC40ik4t1NT953Qbpj8KhcXu6enDVlKr+IylimMrxw5x/ZQ7iR2aFtZi7p77V8229S6KIQwqykXVhedjO1B66KX5Cpf0tjaLHsVI11XWU2KTEqI0GlcCmF0qJaRoW/LkLxVYZzTIdFvQqRlpnRTmDWcnuKY6T+CA1WWMp7iyFnKyMTVR7Wv+9yWzxmdF30gexTX4VVWJNP2Qatxgq+yhIuPpOMUhJ26aGW18utmv3nuj2TpKOJCGBtGKVdZ+vO62R96XJ8Z5yVDeCDCqCrC6lSG2VgFx1NhNtZh6DVhNLWVGVJi1QFEmjouhg6dIPQKGYYBGQZ9FWpBVRj0deg6JnQLW5kS9dYhk6KXG9ICIixmZZipZeipKpQoqtBTdZipTUBMtnJtGpt0/+HtJSnlijOhodZJhOhAhiSMDNVwKuTEVGAtOtROmoCIbGVo7+1C8W6cPw7FTiLUX8mwTpN2StOF9xgSdDrk5UxIx9nK8Hu0iUac8Vo+ZEFFKDOSYe0lw2CowuJShY6gQ5rGhGWZFXXDhTcIjPfxgBkSbCIkZmWI4WVY76oQFOrg/XpKLIb+ayvjNG9SuiUiLiM4CQV3FcF1ZOhlMriOCmsXHVzahJWyCet8OxHRcLELUbsLKx5XWZAxGr7vD6+7l0MaMMWVOZ1qeDXpeBu9lGmmPf3Wk61h/BOrKfM4pfJCrwsjogwDhRJT3B5cLEzOKnQ/PSUO7LSOnYqhwlPtpgTghLjC2s40jL4hZZM4ZLRIDStpMRX3hlWUDDkMFR6IDmOfDqsUE4ZsWxlZJTbNRjgTRZLhGYmpWD7ENjKsiFVYAurwhnR4MWbK51byp9GwYaafrUZMNDGHdbqYSiGDi8uJGYRuoMPwp6d3OW1gqMznnazapUvRuIInrJTFlMwPKx0ZYggV0lM6DH86uJIJY7atzAKeDZq/vJithZl8yviFbiGn3hAiVB1GQB0whAkDt61MiHmr5u8tDtLCEDdl8cLwJcPwpUKuQYceoEOa0kyF05UL/9fN/rjdPB2O+37zDEexuHDHmeqfSYvWY+hYWcF3qoyYF4nGnD+EFWHIEmEGlyHakWEGVyFy1CGqMCHzZUJobKeiknCxC27qQvbHhc7RqbrJbVpEpA82+hWusrZq37/f94cEwa7jqDmMjVrVhSan9nByI95nFCZKMWUEw6OT4dGpMMXoMP7pkIA2YfC2lVmtff+4PdC3Hu+2sZXJhX3/vPvUo/KveHda5eSM+kdcjxFcRkx5wpDNkxMKDLG/DsOxDqlVEyYSGyYSV5mbmiWJ49jJTqFYJTA69C9JvUiMrsLqRkwprbA+kyFbqkLcpUPH0SE+MWFSspXrWlTkGJc7T3stQn8c/3c98ZwwCYallwxLDRnegwzhtArjrJqWcGHA1WGQ0SH01+GXmPCGTZjWzFQMFuYuO1WFhZbdRJ6C6W7aKjeFpCHMdiHq7Sq3DJye2iyBF5ctd5XLdQgz4/2ytrJu6LB726dv0sUBfuV2wHGeSn9V1MrpiYWFkQiPUE6rubBSUXZKvE5vcnqBwUunsrlwsZugW4h+XYgAnK10af8Lti+
PQ2xyT8uB45o/51pahHN3nLNwlUj9cNxv70mFV1x7VbmT6vD2+rrbHw93m7eH7e5uXjYW7xvrKnd6kEbHtFc6OsQ4sLIse2o1VKHfDe+FTtfx1r6uslJ7avl1qP4/3t1v7j/M2o2pRmXR9tQujizi8qWuMrKYmvy0pXvsRVzt01WWSk/t/dL/dHcYNqt+SNuMiwYqa6YPvx6OfdL7Y/obOqmYEtJTdi4sh1UI/HQYgHWYK0xYn9jaUdYbc7c97Valz0zEs6atc8Tj5jEdaOOtQl1lSHfsn1+HnQ9v+3QsiOwJKZ2QrRFumh7DEmM1TYbhiYUwwoSnatw0v4W12cRyJ6IcgnBn6nq4X3EnBD4uog0TYUgFhPW1DJkoFdbXyk4zdVirhSSSCcGAnfKwE3GY0G+IE1yIXJ2tcwuUxoiZfXieYspcTQnbYJeaooww3eiQWjFhvWrDetWGcMuFl+gqN5Med/zerdhZKuvMZhAqzk67hkb86Es4lIjnsTD1Vlafo4nWJiwzvJGwoplS1SHyUiFI0MGxdcg3mrBgtxPL7+pG3Fm2JN4yOxU9BeNC0kaGHJcKSRsVvFeHVJIRUwgajJvy6FNUOeHqEBs5UzepHXev86IDEW+t6yq3kh93x80T4Hbx26488WNsCgCwOFPXVZ70QPl5HFSIkDRzIVPQVW5DenuBq9q4EtNW1lDO6xDiPSxugvMhEHbV7abGxXUD01pqWkJN9CTkXlSYI3QYR3XIvZiwrLGVodOnfr99/+v97uWln02ucdl+8GcR3FhODCUsB1WYAnTweR0WfSYs2W1l5PXLfvP62j/MBrs49dpV7n6O2potBOIcZVe5b+szqVOIqwVNWHraygNGPj+ntCrKlpTv/8ftzev2tX/avvQ3X/74j3//+/8CtOv5qQ=="; \ No newline at end of file diff --git a/public/docs/classes/APIv1.html b/public/docs/classes/APIv1.html index 97bab56..b33003e 100644 --- a/public/docs/classes/APIv1.html +++ b/public/docs/classes/APIv1.html @@ -1,5 +1,5 @@ APIv1 | @themaximalist/llm.js
                                                                                                                                                          @themaximalist/llm.js
                                                                                                                                                            Preparing search index...

                                                                                                                                                            Class APIv1

                                                                                                                                                            OpenAI API v1 Compatible Base Class

                                                                                                                                                            -

                                                                                                                                                            Hierarchy (View Summary)

                                                                                                                                                            Index

                                                                                                                                                            Constructors

                                                                                                                                                            Hierarchy (View Summary)

                                                                                                                                                            Index

                                                                                                                                                            Constructors

                                                                                                                                                            Properties

                                                                                                                                                            baseUrl: string
                                                                                                                                                            extended: boolean
                                                                                                                                                            json?: boolean
                                                                                                                                                            max_thinking_tokens?: number
                                                                                                                                                            max_tokens: number
                                                                                                                                                            messages: Message[]
                                                                                                                                                            model: string
                                                                                                                                                            modelUsage: ModelUsage
                                                                                                                                                            options: Options
                                                                                                                                                            qualityFilter: QualityFilter
                                                                                                                                                            service: string
                                                                                                                                                            stream: boolean
                                                                                                                                                            temperature?: number
                                                                                                                                                            think: boolean
                                                                                                                                                            tools?: Tool[]
                                                                                                                                                            DEFAULT_BASE_URL: string = ""
                                                                                                                                                            DEFAULT_MODEL: string = ""
                                                                                                                                                            isBearerAuth: boolean = true
                                                                                                                                                            isLocal: boolean = false
                                                                                                                                                            KEY_REASONING_CONTENT: string = "reasoning_content"
                                                                                                                                                            parsers: parsers = parsers
                                                                                                                                                            service: string = "openai"

                                                                                                                                                            Accessors

                                                                                                                                                            • get apiKey(): undefined | null | string

                                                                                                                                                              Returns undefined | null | string

                                                                                                                                                            • get chatUrl(): string

                                                                                                                                                              Returns string

                                                                                                                                                            • get isLocal(): boolean

                                                                                                                                                              Returns boolean

                                                                                                                                                            • get llmHeaders(): Record<string, string>

                                                                                                                                                              Returns Record<string, string>

                                                                                                                                                            • get llmOptions(): Options

                                                                                                                                                              Returns Options

                                                                                                                                                            • get modelsUrl(): string

                                                                                                                                                              Returns string

                                                                                                                                                            • get parsers(): Parsers

                                                                                                                                                              Returns Parsers

                                                                                                                                                            Methods

                                                                                                                                                            • Returns void

                                                                                                                                                            • Parameters

                                                                                                                                                              • content: string

                                                                                                                                                              Returns void

                                                                                                                                                            • Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                            • Returns Promise<void>

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • content: string

                                                                                                                                                              Returns void

                                                                                                                                                            • Parameters

                                                                                                                                                              • content: string

                                                                                                                                                              Returns void

                                                                                                                                                            • Parameters

                                                                                                                                                              Returns void

                                                                                                                                                            • Returns Promise<boolean>

                                                                                                                                                            +

                                                                                                                                                            Constructors

                                                                                                                                                            Properties

                                                                                                                                                            baseUrl: string
                                                                                                                                                            extended: boolean
                                                                                                                                                            json?: boolean
                                                                                                                                                            max_thinking_tokens?: number
                                                                                                                                                            max_tokens: number
                                                                                                                                                            messages: Message[]
                                                                                                                                                            model: string
                                                                                                                                                            modelUsage: ModelUsage
                                                                                                                                                            options: Options
                                                                                                                                                            qualityFilter: QualityFilter
                                                                                                                                                            service: string
                                                                                                                                                            stream: boolean
                                                                                                                                                            temperature?: number
                                                                                                                                                            think: boolean
                                                                                                                                                            tools?: Tool[]
                                                                                                                                                            DEFAULT_BASE_URL: string = ""
                                                                                                                                                            DEFAULT_MODEL: string = ""
                                                                                                                                                            isBearerAuth: boolean = true
                                                                                                                                                            isLocal: boolean = false
                                                                                                                                                            KEY_REASONING_CONTENT: string = "reasoning_content"
                                                                                                                                                            MessageExtendedContentInputKey: string = "text"
                                                                                                                                                            parsers: parsers = parsers
                                                                                                                                                            service: string = "openai"

                                                                                                                                                            Accessors

                                                                                                                                                            • get apiKey(): undefined | null | string

                                                                                                                                                              Returns undefined | null | string

                                                                                                                                                            • get chatUrl(): string

                                                                                                                                                              Returns string

                                                                                                                                                            • get isLocal(): boolean

                                                                                                                                                              Returns boolean

                                                                                                                                                            • get llmHeaders(): Record<string, string>

                                                                                                                                                              Returns Record<string, string>

                                                                                                                                                            • get llmOptions(): Options

                                                                                                                                                              Returns Options

                                                                                                                                                            • get modelsUrl(): string

                                                                                                                                                              Returns string

                                                                                                                                                            • get parsers(): Parsers

                                                                                                                                                              Returns Parsers

                                                                                                                                                            Methods

                                                                                                                                                            • Returns void

                                                                                                                                                            • Parameters

                                                                                                                                                              • content: string

                                                                                                                                                              Returns void

                                                                                                                                                            • Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              Returns { image_url: { detail: string; url: string }; type: string }

                                                                                                                                                            • Parameters

                                                                                                                                                              • content: any

                                                                                                                                                              Returns any[]

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                            • Returns Promise<void>

                                                                                                                                                            • Parameters

                                                                                                                                                              • data: any

                                                                                                                                                              Returns string

                                                                                                                                                            • Parameters

                                                                                                                                                              • content: string

                                                                                                                                                              Returns void

                                                                                                                                                            • Parameters

                                                                                                                                                              • content: string

                                                                                                                                                              Returns void

                                                                                                                                                            • Parameters

                                                                                                                                                              Returns void

                                                                                                                                                            • Returns Promise<boolean>

                                                                                                                                                            diff --git a/public/docs/classes/Anthropic.html b/public/docs/classes/Anthropic.html index f7a65f8..df42596 100644 --- a/public/docs/classes/Anthropic.html +++ b/public/docs/classes/Anthropic.html @@ -1,4 +1,4 @@ -Anthropic | @themaximalist/llm.js
                                                                                                                                                            @themaximalist/llm.js
                                                                                                                                                              Preparing search index...

                                                                                                                                                              Class Anthropic

                                                                                                                                                              Hierarchy (View Summary)

                                                                                                                                                              Index

                                                                                                                                                              Constructors

                                                                                                                                                              constructor +Anthropic | @themaximalist/llm.js
                                                                                                                                                              @themaximalist/llm.js
                                                                                                                                                                Preparing search index...

                                                                                                                                                                Class Anthropic

                                                                                                                                                                Hierarchy (View Summary)

                                                                                                                                                                Index

                                                                                                                                                                Constructors

                                                                                                                                                                Properties

                                                                                                                                                                baseUrl: string
                                                                                                                                                                extended: boolean
                                                                                                                                                                json?: boolean
                                                                                                                                                                max_thinking_tokens?: number
                                                                                                                                                                max_tokens: number
                                                                                                                                                                messages: Message[]
                                                                                                                                                                model: string
                                                                                                                                                                modelUsage: ModelUsage
                                                                                                                                                                options: Options
                                                                                                                                                                qualityFilter: QualityFilter
                                                                                                                                                                service: string
                                                                                                                                                                stream: boolean
                                                                                                                                                                temperature?: number
                                                                                                                                                                think: boolean
                                                                                                                                                                tools?: Tool[]
                                                                                                                                                                API_VERSION: string = "2023-06-01"
                                                                                                                                                                DEFAULT_BASE_URL: string = "https://api.anthropic.com/v1"
                                                                                                                                                                DEFAULT_MODEL: string = "claude-opus-4-20250514"
                                                                                                                                                                isBearerAuth: boolean = false
                                                                                                                                                                isLocal: boolean = false
                                                                                                                                                                parsers: parsers = parsers
                                                                                                                                                                service: string = "anthropic"

                                                                                                                                                                Accessors

                                                                                                                                                                • get apiKey(): undefined | null | string

                                                                                                                                                                  Returns undefined | null | string

                                                                                                                                                                • get chatUrl(): string

                                                                                                                                                                  Returns string

                                                                                                                                                                • get isLocal(): boolean

                                                                                                                                                                  Returns boolean

                                                                                                                                                                • get llmHeaders(): { "anthropic-version": string } & Record<string, string>

                                                                                                                                                                  Returns { "anthropic-version": string } & Record<string, string>

                                                                                                                                                                • get llmOptions(): Options

                                                                                                                                                                  Returns Options

                                                                                                                                                                • get modelsUrl(): string

                                                                                                                                                                  Returns string

                                                                                                                                                                • get parsers(): Parsers

                                                                                                                                                                  Returns Parsers

                                                                                                                                                                Methods

                                                                                                                                                                • Returns void

                                                                                                                                                                • Parameters

                                                                                                                                                                  • content: string

                                                                                                                                                                  Returns void

                                                                                                                                                                • Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • chunk: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • chunk: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns null | { input_tokens: number; output_tokens: number }

                                                                                                                                                                • Returns Promise<void>

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • content: string

                                                                                                                                                                  Returns void

                                                                                                                                                                • Parameters

                                                                                                                                                                  • content: string

                                                                                                                                                                  Returns void

                                                                                                                                                                • Parameters

                                                                                                                                                                  Returns void

                                                                                                                                                                • Returns Promise<boolean>

                                                                                                                                                                +

                                                                                                                                                                Constructors

                                                                                                                                                                Properties

                                                                                                                                                                baseUrl: string
                                                                                                                                                                extended: boolean
                                                                                                                                                                json?: boolean
                                                                                                                                                                max_thinking_tokens?: number
                                                                                                                                                                max_tokens: number
                                                                                                                                                                messages: Message[]
                                                                                                                                                                model: string
                                                                                                                                                                modelUsage: ModelUsage
                                                                                                                                                                options: Options
                                                                                                                                                                qualityFilter: QualityFilter
                                                                                                                                                                service: string
                                                                                                                                                                stream: boolean
                                                                                                                                                                temperature?: number
                                                                                                                                                                think: boolean
                                                                                                                                                                tools?: Tool[]
                                                                                                                                                                API_VERSION: string = "2023-06-01"
                                                                                                                                                                DEFAULT_BASE_URL: string = "https://api.anthropic.com/v1"
                                                                                                                                                                DEFAULT_MODEL: string = "claude-opus-4-20250514"
                                                                                                                                                                isBearerAuth: boolean = false
                                                                                                                                                                isLocal: boolean = false
                                                                                                                                                                MessageExtendedContentInputKey: string = "text"
                                                                                                                                                                parsers: parsers = parsers
                                                                                                                                                                service: string = "anthropic"

                                                                                                                                                                Accessors

                                                                                                                                                                • get apiKey(): undefined | null | string

                                                                                                                                                                  Returns undefined | null | string

                                                                                                                                                                • get chatUrl(): string

                                                                                                                                                                  Returns string

                                                                                                                                                                • get isLocal(): boolean

                                                                                                                                                                  Returns boolean

                                                                                                                                                                • get llmHeaders(): { "anthropic-version": string } & Record<string, string>

                                                                                                                                                                  Returns { "anthropic-version": string } & Record<string, string>

                                                                                                                                                                • get llmOptions(): Options

                                                                                                                                                                  Returns Options

                                                                                                                                                                • get modelsUrl(): string

                                                                                                                                                                  Returns string

                                                                                                                                                                • get parsers(): Parsers

                                                                                                                                                                  Returns Parsers

                                                                                                                                                                Methods

                                                                                                                                                                • Returns void

                                                                                                                                                                • Parameters

                                                                                                                                                                  • content: string

                                                                                                                                                                  Returns void

                                                                                                                                                                • Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • content: any

                                                                                                                                                                  Returns any[]

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • chunk: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • chunk: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns null | { input_tokens: number; output_tokens: number }

                                                                                                                                                                • Returns Promise<void>

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: any

                                                                                                                                                                  Returns string

                                                                                                                                                                • Parameters

                                                                                                                                                                  • content: string

                                                                                                                                                                  Returns void

                                                                                                                                                                • Parameters

                                                                                                                                                                  • content: string

                                                                                                                                                                  Returns void

                                                                                                                                                                • Parameters

                                                                                                                                                                  Returns void

                                                                                                                                                                • Returns Promise<boolean>

                                                                                                                                                                diff --git a/public/docs/classes/Attachment.html b/public/docs/classes/Attachment.html index 56db947..8a949e9 100644 --- a/public/docs/classes/Attachment.html +++ b/public/docs/classes/Attachment.html @@ -5,6 +5,7 @@

                                                                                                                                                                Accessors

                                                                                                                                                                Methods

                                                                                                                                                                Constructors

                                                                                                                                                                Properties

                                                                                                                                                                contentType: string
                                                                                                                                                                data: string

                                                                                                                                                                Accessors

                                                                                                                                                                • get content(): {
                                                                                                                                                                      source: | {
                                                                                                                                                                          data?: undefined;
                                                                                                                                                                          media_type?: undefined;
                                                                                                                                                                          type: string;
                                                                                                                                                                          url: string;
                                                                                                                                                                      }
                                                                                                                                                                      | { data: string; media_type: string; type: string; url?: undefined };
                                                                                                                                                                      type: AttachmentType;
                                                                                                                                                                  }

                                                                                                                                                                  Returns {
                                                                                                                                                                      source:
                                                                                                                                                                          | {
                                                                                                                                                                              data?: undefined;
                                                                                                                                                                              media_type?: undefined;
                                                                                                                                                                              type: string;
                                                                                                                                                                              url: string;
                                                                                                                                                                          }
                                                                                                                                                                          | { data: string; media_type: string; type: string; url?: undefined };
                                                                                                                                                                      type: AttachmentType;
                                                                                                                                                                  }

                                                                                                                                                                • get isDocument(): boolean

                                                                                                                                                                  Returns boolean

                                                                                                                                                                • get isImage(): boolean

                                                                                                                                                                  Returns boolean

                                                                                                                                                                • get source(): | {
                                                                                                                                                                      data?: undefined;
                                                                                                                                                                      media_type?: undefined;
                                                                                                                                                                      type: string;
                                                                                                                                                                      url: string;
                                                                                                                                                                  }
                                                                                                                                                                  | { data: string; media_type: string; type: string; url?: undefined }

                                                                                                                                                                  Returns
                                                                                                                                                                      | {
                                                                                                                                                                          data?: undefined;
                                                                                                                                                                          media_type?: undefined;
                                                                                                                                                                          type: string;
                                                                                                                                                                          url: string;
                                                                                                                                                                      }
                                                                                                                                                                      | { data: string; media_type: string; type: string; url?: undefined }

                                                                                                                                                                Methods

                                                                                                                                                                • Parameters

                                                                                                                                                                  • url: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • url: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                +

                                                                                                                                                                Constructors

                                                                                                                                                                Properties

                                                                                                                                                                contentType: string
                                                                                                                                                                data: string

                                                                                                                                                                Accessors

                                                                                                                                                                • get content(): {
                                                                                                                                                                      source: | {
                                                                                                                                                                          data?: undefined;
                                                                                                                                                                          media_type?: undefined;
                                                                                                                                                                          type: string;
                                                                                                                                                                          url: string;
                                                                                                                                                                      }
                                                                                                                                                                      | { data: string; media_type: string; type: string; url?: undefined };
                                                                                                                                                                      type: AttachmentType;
                                                                                                                                                                  }

                                                                                                                                                                  Returns {
                                                                                                                                                                      source:
                                                                                                                                                                          | {
                                                                                                                                                                              data?: undefined;
                                                                                                                                                                              media_type?: undefined;
                                                                                                                                                                              type: string;
                                                                                                                                                                              url: string;
                                                                                                                                                                          }
                                                                                                                                                                          | { data: string; media_type: string; type: string; url?: undefined };
                                                                                                                                                                      type: AttachmentType;
                                                                                                                                                                  }

                                                                                                                                                                • get isDocument(): boolean

                                                                                                                                                                  Returns boolean

                                                                                                                                                                • get isImage(): boolean

                                                                                                                                                                  Returns boolean

                                                                                                                                                                • get isURL(): boolean

                                                                                                                                                                  Returns boolean

                                                                                                                                                                • get source(): | {
                                                                                                                                                                      data?: undefined;
                                                                                                                                                                      media_type?: undefined;
                                                                                                                                                                      type: string;
                                                                                                                                                                      url: string;
                                                                                                                                                                  }
                                                                                                                                                                  | { data: string; media_type: string; type: string; url?: undefined }

                                                                                                                                                                  Returns
                                                                                                                                                                      | {
                                                                                                                                                                          data?: undefined;
                                                                                                                                                                          media_type?: undefined;
                                                                                                                                                                          type: string;
                                                                                                                                                                          url: string;
                                                                                                                                                                      }
                                                                                                                                                                      | { data: string; media_type: string; type: string; url?: undefined }

                                                                                                                                                                Methods

                                                                                                                                                                • Parameters

                                                                                                                                                                  • url: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • url: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                • Parameters

                                                                                                                                                                  • data: string

                                                                                                                                                                  Returns Attachment

                                                                                                                                                                diff --git a/public/docs/classes/DeepSeek.html b/public/docs/classes/DeepSeek.html index 2bb6432..0765889 100644 --- a/public/docs/classes/DeepSeek.html +++ b/public/docs/classes/DeepSeek.html @@ -1,4 +1,4 @@ -DeepSeek | @themaximalist/llm.js
                                                                                                                                                                @themaximalist/llm.js
                                                                                                                                                                  Preparing search index...

                                                                                                                                                                  Class DeepSeek

                                                                                                                                                                  Hierarchy (View Summary)

                                                                                                                                                                  Index

                                                                                                                                                                  Constructors

                                                                                                                                                                  constructor +DeepSeek | @themaximalist/llm.js
                                                                                                                                                                  @themaximalist/llm.js
                                                                                                                                                                    Preparing search index...

                                                                                                                                                                    Class DeepSeek

                                                                                                                                                                    Hierarchy (View Summary)

                                                                                                                                                                    Index

                                                                                                                                                                    Constructors

                                                                                                                                                                    Properties

                                                                                                                                                                    baseUrl: string
                                                                                                                                                                    extended: boolean
                                                                                                                                                                    json?: boolean
                                                                                                                                                                    max_thinking_tokens?: number
                                                                                                                                                                    max_tokens: number
                                                                                                                                                                    messages: Message[]
                                                                                                                                                                    model: string
                                                                                                                                                                    modelUsage: ModelUsage
                                                                                                                                                                    options: Options
                                                                                                                                                                    qualityFilter: QualityFilter
                                                                                                                                                                    service: string
                                                                                                                                                                    stream: boolean
                                                                                                                                                                    temperature?: number
                                                                                                                                                                    think: boolean
                                                                                                                                                                    tools?: Tool[]
                                                                                                                                                                    DEFAULT_BASE_URL: string = "https://api.deepseek.com/v1/"
                                                                                                                                                                    DEFAULT_MODEL: string = "deepseek-chat"
                                                                                                                                                                    isBearerAuth: boolean = true
                                                                                                                                                                    isLocal: boolean = false
                                                                                                                                                                    KEY_REASONING_CONTENT: string = "reasoning_content"
                                                                                                                                                                    parsers: parsers = parsers
                                                                                                                                                                    service: string = "deepseek"

                                                                                                                                                                    Accessors

                                                                                                                                                                    • get apiKey(): undefined | null | string

                                                                                                                                                                      Returns undefined | null | string

                                                                                                                                                                    • get chatUrl(): string

                                                                                                                                                                      Returns string

                                                                                                                                                                    • get isLocal(): boolean

                                                                                                                                                                      Returns boolean

                                                                                                                                                                    • get llmHeaders(): Record<string, string>

                                                                                                                                                                      Returns Record<string, string>

                                                                                                                                                                    • get llmOptions(): Options

                                                                                                                                                                      Returns Options

                                                                                                                                                                    • get modelsUrl(): string

                                                                                                                                                                      Returns string

                                                                                                                                                                    • get parsers(): Parsers

                                                                                                                                                                      Returns Parsers

                                                                                                                                                                    Methods

                                                                                                                                                                    • Returns void

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • content: string

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                    • Returns Promise<void>

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • content: string

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • content: string

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Parameters

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Returns Promise<boolean>

                                                                                                                                                                    +

                                                                                                                                                                    Constructors

                                                                                                                                                                    Properties

                                                                                                                                                                    baseUrl: string
                                                                                                                                                                    extended: boolean
                                                                                                                                                                    json?: boolean
                                                                                                                                                                    max_thinking_tokens?: number
                                                                                                                                                                    max_tokens: number
                                                                                                                                                                    messages: Message[]
                                                                                                                                                                    model: string
                                                                                                                                                                    modelUsage: ModelUsage
                                                                                                                                                                    options: Options
                                                                                                                                                                    qualityFilter: QualityFilter
                                                                                                                                                                    service: string
                                                                                                                                                                    stream: boolean
                                                                                                                                                                    temperature?: number
                                                                                                                                                                    think: boolean
                                                                                                                                                                    tools?: Tool[]
                                                                                                                                                                    DEFAULT_BASE_URL: string = "https://api.deepseek.com/v1/"
                                                                                                                                                                    DEFAULT_MODEL: string = "deepseek-chat"
                                                                                                                                                                    isBearerAuth: boolean = true
                                                                                                                                                                    isLocal: boolean = false
                                                                                                                                                                    KEY_REASONING_CONTENT: string = "reasoning_content"
                                                                                                                                                                    MessageExtendedContentInputKey: string = "text"
                                                                                                                                                                    parsers: parsers = parsers
                                                                                                                                                                    service: string = "deepseek"

                                                                                                                                                                    Accessors

                                                                                                                                                                    • get apiKey(): undefined | null | string

                                                                                                                                                                      Returns undefined | null | string

                                                                                                                                                                    • get chatUrl(): string

                                                                                                                                                                      Returns string

                                                                                                                                                                    • get isLocal(): boolean

                                                                                                                                                                      Returns boolean

                                                                                                                                                                    • get llmHeaders(): Record<string, string>

                                                                                                                                                                      Returns Record<string, string>

                                                                                                                                                                    • get llmOptions(): Options

                                                                                                                                                                      Returns Options

                                                                                                                                                                    • get modelsUrl(): string

                                                                                                                                                                      Returns string

                                                                                                                                                                    • get parsers(): Parsers

                                                                                                                                                                      Returns Parsers

                                                                                                                                                                    Methods

                                                                                                                                                                    • Returns void

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • content: string

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      Returns { image_url: { detail: string; url: string }; type: string }

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • content: any

                                                                                                                                                                      Returns any[]

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                    • Returns Promise<void>

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • data: any

                                                                                                                                                                      Returns string

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • content: string

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Parameters

                                                                                                                                                                      • content: string

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Parameters

                                                                                                                                                                      Returns void

                                                                                                                                                                    • Returns Promise<boolean>

                                                                                                                                                                    diff --git a/public/docs/classes/Google.html b/public/docs/classes/Google.html index 6289924..3fe0f20 100644 --- a/public/docs/classes/Google.html +++ b/public/docs/classes/Google.html @@ -1,4 +1,4 @@ -Google | @themaximalist/llm.js
                                                                                                                                                                    @themaximalist/llm.js
                                                                                                                                                                      Preparing search index...

                                                                                                                                                                      Class Google

                                                                                                                                                                      Hierarchy (View Summary)

                                                                                                                                                                      Index

                                                                                                                                                                      Constructors

                                                                                                                                                                      constructor +Google | @themaximalist/llm.js
                                                                                                                                                                      @themaximalist/llm.js
                                                                                                                                                                        Preparing search index...

                                                                                                                                                                        Class Google

                                                                                                                                                                        Hierarchy (View Summary)

                                                                                                                                                                        Index

                                                                                                                                                                        Constructors

                                                                                                                                                                        Properties

                                                                                                                                                                        baseUrl: string
                                                                                                                                                                        extended: boolean
                                                                                                                                                                        json?: boolean
                                                                                                                                                                        max_thinking_tokens?: number
                                                                                                                                                                        max_tokens: number
                                                                                                                                                                        messages: Message[]
                                                                                                                                                                        model: string
                                                                                                                                                                        modelUsage: ModelUsage
                                                                                                                                                                        options: Options
                                                                                                                                                                        qualityFilter: QualityFilter
                                                                                                                                                                        service: string
                                                                                                                                                                        stream: boolean
                                                                                                                                                                        temperature?: number
                                                                                                                                                                        think: boolean
                                                                                                                                                                        tools?: Tool[]
                                                                                                                                                                        DEFAULT_BASE_URL: string = "https://generativelanguage.googleapis.com/v1beta/"
                                                                                                                                                                        DEFAULT_MODEL: string = "gemini-2.5-flash-preview-05-20"
                                                                                                                                                                        isBearerAuth: boolean = false
                                                                                                                                                                        isLocal: boolean = false
                                                                                                                                                                        parsers: parsers = parsers
                                                                                                                                                                        service: string = "google"

                                                                                                                                                                        Accessors

                                                                                                                                                                        • get apiKey(): undefined | null | string

                                                                                                                                                                          Returns undefined | null | string

                                                                                                                                                                        • get chatUrl(): string

                                                                                                                                                                          Returns string

                                                                                                                                                                        • get isLocal(): boolean

                                                                                                                                                                          Returns boolean

                                                                                                                                                                        • get llmHeaders(): Record<string, string>

                                                                                                                                                                          Returns Record<string, string>

                                                                                                                                                                        • get llmOptions(): Options

                                                                                                                                                                          Returns Options

                                                                                                                                                                        • get modelsUrl(): string

                                                                                                                                                                          Returns string

                                                                                                                                                                        • get parsers(): Parsers

                                                                                                                                                                          Returns Parsers

                                                                                                                                                                        Methods

                                                                                                                                                                        • Returns void

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • content: string

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • chunk: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • chunk: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                        • Returns Promise<void>

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • content: string

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • content: string

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Parameters

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Returns Promise<boolean>

                                                                                                                                                                        +fromGoogleMessage +toGoogleMessage +

                                                                                                                                                                        Constructors

                                                                                                                                                                        Properties

                                                                                                                                                                        baseUrl: string
                                                                                                                                                                        extended: boolean
                                                                                                                                                                        json?: boolean
                                                                                                                                                                        max_thinking_tokens?: number
                                                                                                                                                                        max_tokens: number
                                                                                                                                                                        messages: Message[]
                                                                                                                                                                        model: string
                                                                                                                                                                        modelUsage: ModelUsage
                                                                                                                                                                        options: Options
                                                                                                                                                                        qualityFilter: QualityFilter
                                                                                                                                                                        service: string
                                                                                                                                                                        stream: boolean
                                                                                                                                                                        temperature?: number
                                                                                                                                                                        think: boolean
                                                                                                                                                                        tools?: Tool[]
                                                                                                                                                                        DEFAULT_BASE_URL: string = "https://generativelanguage.googleapis.com/v1beta/"
                                                                                                                                                                        DEFAULT_MODEL: string = "gemini-2.5-flash-preview-05-20"
                                                                                                                                                                        isBearerAuth: boolean = false
                                                                                                                                                                        isLocal: boolean = false
                                                                                                                                                                        MessageExtendedContentInputKey: string = "text"
                                                                                                                                                                        parsers: parsers = parsers
                                                                                                                                                                        service: string = "google"

                                                                                                                                                                        Accessors

                                                                                                                                                                        • get apiKey(): undefined | null | string

                                                                                                                                                                          Returns undefined | null | string

                                                                                                                                                                        • get chatUrl(): string

                                                                                                                                                                          Returns string

                                                                                                                                                                        • get isLocal(): boolean

                                                                                                                                                                          Returns boolean

                                                                                                                                                                        • get llmHeaders(): Record<string, string>

                                                                                                                                                                          Returns Record<string, string>

                                                                                                                                                                        • get llmOptions(): Options

                                                                                                                                                                          Returns Options

                                                                                                                                                                        • get modelsUrl(): string

                                                                                                                                                                          Returns string

                                                                                                                                                                        • get parsers(): Parsers

                                                                                                                                                                          Returns Parsers

                                                                                                                                                                        Methods

                                                                                                                                                                        • Returns void

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • content: string

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • content: any

                                                                                                                                                                          Returns any[]

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • chunk: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • chunk: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                        • Returns Promise<void>

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • data: any

                                                                                                                                                                          Returns string

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • content: string

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Parameters

                                                                                                                                                                          • content: string

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Parameters

                                                                                                                                                                          Returns void

                                                                                                                                                                        • Returns Promise<boolean>

                                                                                                                                                                        diff --git a/public/docs/classes/Groq.html b/public/docs/classes/Groq.html index 24deb30..702940f 100644 --- a/public/docs/classes/Groq.html +++ b/public/docs/classes/Groq.html @@ -1,4 +1,4 @@ -Groq | @themaximalist/llm.js
                                                                                                                                                                        @themaximalist/llm.js
                                                                                                                                                                          Preparing search index...

                                                                                                                                                                          Class Groq

                                                                                                                                                                          Hierarchy (View Summary)

                                                                                                                                                                          Index

                                                                                                                                                                          Constructors

                                                                                                                                                                          constructor +Groq | @themaximalist/llm.js
                                                                                                                                                                          @themaximalist/llm.js
                                                                                                                                                                            Preparing search index...

                                                                                                                                                                            Class Groq

                                                                                                                                                                            Hierarchy (View Summary)

                                                                                                                                                                            Index

                                                                                                                                                                            Constructors

                                                                                                                                                                            Properties

                                                                                                                                                                            baseUrl: string
                                                                                                                                                                            extended: boolean
                                                                                                                                                                            json?: boolean
                                                                                                                                                                            max_thinking_tokens?: number
                                                                                                                                                                            max_tokens: number
                                                                                                                                                                            messages: Message[]
                                                                                                                                                                            model: string
                                                                                                                                                                            modelUsage: ModelUsage
                                                                                                                                                                            options: Options
                                                                                                                                                                            qualityFilter: QualityFilter
                                                                                                                                                                            service: string
                                                                                                                                                                            stream: boolean
                                                                                                                                                                            temperature?: number
                                                                                                                                                                            think: boolean
                                                                                                                                                                            tools?: Tool[]
                                                                                                                                                                            DEFAULT_BASE_URL: string = "https://api.groq.com/openai/v1/"
                                                                                                                                                                            DEFAULT_MODEL: string = "deepseek-r1-distill-llama-70b"
                                                                                                                                                                            isBearerAuth: boolean = true
                                                                                                                                                                            isLocal: boolean = false
                                                                                                                                                                            KEY_REASONING_CONTENT: string = "reasoning"
                                                                                                                                                                            parsers: parsers = parsers
                                                                                                                                                                            service: string = "groq"

                                                                                                                                                                            Accessors

                                                                                                                                                                            • get apiKey(): undefined | null | string

                                                                                                                                                                              Returns undefined | null | string

                                                                                                                                                                            • get chatUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            • get isLocal(): boolean

                                                                                                                                                                              Returns boolean

                                                                                                                                                                            • get llmHeaders(): Record<string, string>

                                                                                                                                                                              Returns Record<string, string>

                                                                                                                                                                            • get llmOptions(): Options

                                                                                                                                                                              Returns Options

                                                                                                                                                                            • get modelsUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            • get parsers(): Parsers

                                                                                                                                                                              Returns Parsers

                                                                                                                                                                            Methods

                                                                                                                                                                            • Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                            • Returns Promise<void>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Returns Promise<boolean>

                                                                                                                                                                            +

                                                                                                                                                                            Constructors

                                                                                                                                                                            Properties

                                                                                                                                                                            baseUrl: string
                                                                                                                                                                            extended: boolean
                                                                                                                                                                            json?: boolean
                                                                                                                                                                            max_thinking_tokens?: number
                                                                                                                                                                            max_tokens: number
                                                                                                                                                                            messages: Message[]
                                                                                                                                                                            model: string
                                                                                                                                                                            modelUsage: ModelUsage
                                                                                                                                                                            options: Options
                                                                                                                                                                            qualityFilter: QualityFilter
                                                                                                                                                                            service: string
                                                                                                                                                                            stream: boolean
                                                                                                                                                                            temperature?: number
                                                                                                                                                                            think: boolean
                                                                                                                                                                            tools?: Tool[]
                                                                                                                                                                            DEFAULT_BASE_URL: string = "https://api.groq.com/openai/v1/"
                                                                                                                                                                            DEFAULT_MODEL: string = "deepseek-r1-distill-llama-70b"
                                                                                                                                                                            isBearerAuth: boolean = true
                                                                                                                                                                            isLocal: boolean = false
                                                                                                                                                                            KEY_REASONING_CONTENT: string = "reasoning"
                                                                                                                                                                            MessageExtendedContentInputKey: string = "text"
                                                                                                                                                                            parsers: parsers = parsers
                                                                                                                                                                            service: string = "groq"

                                                                                                                                                                            Accessors

                                                                                                                                                                            • get apiKey(): undefined | null | string

                                                                                                                                                                              Returns undefined | null | string

                                                                                                                                                                            • get chatUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            • get isLocal(): boolean

                                                                                                                                                                              Returns boolean

                                                                                                                                                                            • get llmHeaders(): Record<string, string>

                                                                                                                                                                              Returns Record<string, string>

                                                                                                                                                                            • get llmOptions(): Options

                                                                                                                                                                              Returns Options

                                                                                                                                                                            • get modelsUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            • get parsers(): Parsers

                                                                                                                                                                              Returns Parsers

                                                                                                                                                                            Methods

                                                                                                                                                                            • Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns { image_url: { detail: string; url: string }; type: string }

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: any

                                                                                                                                                                              Returns any[]

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                            • Returns Promise<void>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Returns Promise<boolean>

                                                                                                                                                                            diff --git a/public/docs/classes/LLM.html b/public/docs/classes/LLM.html index 6574a7f..efaaa54 100644 --- a/public/docs/classes/LLM.html +++ b/public/docs/classes/LLM.html @@ -20,6 +20,7 @@ DEFAULT_MODEL isBearerAuth isLocal +MessageExtendedContentInputKey parsers service

                                                                                                                                                                            Accessors

                                                                                                                                                                            Constructors

                                                                                                                                                                            Properties

                                                                                                                                                                            baseUrl: string
                                                                                                                                                                            extended: boolean
                                                                                                                                                                            json?: boolean
                                                                                                                                                                            max_thinking_tokens?: number
                                                                                                                                                                            max_tokens: number
                                                                                                                                                                            messages: Message[]
                                                                                                                                                                            model: string
                                                                                                                                                                            modelUsage: ModelUsage
                                                                                                                                                                            options: Options
                                                                                                                                                                            qualityFilter: QualityFilter
                                                                                                                                                                            service: string
                                                                                                                                                                            stream: boolean
                                                                                                                                                                            temperature?: number
                                                                                                                                                                            think: boolean
                                                                                                                                                                            tools?: Tool[]
                                                                                                                                                                            DEFAULT_BASE_URL: string
                                                                                                                                                                            DEFAULT_MODEL: string
                                                                                                                                                                            isBearerAuth: boolean = false
                                                                                                                                                                            isLocal: boolean = false
                                                                                                                                                                            parsers: parsers = parsers
                                                                                                                                                                            service: string

                                                                                                                                                                            Accessors

                                                                                                                                                                            • get apiKey(): undefined | null | string

                                                                                                                                                                              Returns undefined | null | string

                                                                                                                                                                            • get chatUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            • get isLocal(): boolean

                                                                                                                                                                              Returns boolean

                                                                                                                                                                            • get llmHeaders(): Record<string, string>

                                                                                                                                                                              Returns Record<string, string>

                                                                                                                                                                            • get modelsUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            Methods

                                                                                                                                                                            • Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • input: string
                                                                                                                                                                              • Optionaloptions: Options

                                                                                                                                                                              Returns Promise<
                                                                                                                                                                                  | string
                                                                                                                                                                                  | Response
                                                                                                                                                                                  | PartialStreamResponse
                                                                                                                                                                                  | AsyncGenerator<string, any, any>,
                                                                                                                                                                              >

                                                                                                                                                                            • Returns Promise<Model[]>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns boolean

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Returns string

                                                                                                                                                                            • Returns Promise<Model[]>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • chunk: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • model: any

                                                                                                                                                                              Returns Model

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • chunk: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns ToolCall[]

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • chunk: any

                                                                                                                                                                              Returns ToolCall[]

                                                                                                                                                                            • Returns Promise<void>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Returns Promise<boolean>

                                                                                                                                                                            +

                                                                                                                                                                            Constructors

                                                                                                                                                                            Properties

                                                                                                                                                                            baseUrl: string
                                                                                                                                                                            extended: boolean
                                                                                                                                                                            json?: boolean
                                                                                                                                                                            max_thinking_tokens?: number
                                                                                                                                                                            max_tokens: number
                                                                                                                                                                            messages: Message[]
                                                                                                                                                                            model: string
                                                                                                                                                                            modelUsage: ModelUsage
                                                                                                                                                                            options: Options
                                                                                                                                                                            qualityFilter: QualityFilter
                                                                                                                                                                            service: string
                                                                                                                                                                            stream: boolean
                                                                                                                                                                            temperature?: number
                                                                                                                                                                            think: boolean
                                                                                                                                                                            tools?: Tool[]
                                                                                                                                                                            DEFAULT_BASE_URL: string
                                                                                                                                                                            DEFAULT_MODEL: string
                                                                                                                                                                            isBearerAuth: boolean = false
                                                                                                                                                                            isLocal: boolean = false
                                                                                                                                                                            MessageExtendedContentInputKey: string = "text"
                                                                                                                                                                            parsers: parsers = parsers
                                                                                                                                                                            service: string

                                                                                                                                                                            Accessors

                                                                                                                                                                            • get apiKey(): undefined | null | string

                                                                                                                                                                              Returns undefined | null | string

                                                                                                                                                                            • get chatUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            • get isLocal(): boolean

                                                                                                                                                                              Returns boolean

                                                                                                                                                                            • get llmHeaders(): Record<string, string>

                                                                                                                                                                              Returns Record<string, string>

                                                                                                                                                                            • get modelsUrl(): string

                                                                                                                                                                              Returns string

                                                                                                                                                                            Methods

                                                                                                                                                                            • Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • input: string
                                                                                                                                                                              • Optionaloptions: Options

                                                                                                                                                                              Returns Promise<
                                                                                                                                                                                  | string
                                                                                                                                                                                  | Response
                                                                                                                                                                                  | PartialStreamResponse
                                                                                                                                                                                  | AsyncGenerator<string, any, any>,
                                                                                                                                                                              >

                                                                                                                                                                            • Returns Promise<Model[]>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns boolean

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Returns string

                                                                                                                                                                            • Returns Promise<Model[]>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns any

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: any

                                                                                                                                                                              Returns any[]

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • chunk: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • model: any

                                                                                                                                                                              Returns Model

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • chunk: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns ToolCall[]

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • chunk: any

                                                                                                                                                                              Returns ToolCall[]

                                                                                                                                                                            • Returns Promise<void>

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • data: any

                                                                                                                                                                              Returns string

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              • content: string

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Parameters

                                                                                                                                                                              Returns void

                                                                                                                                                                            • Returns Promise<boolean>

                                                                                                                                                                            diff --git a/public/docs/classes/Ollama.html b/public/docs/classes/Ollama.html index 7064fbf..2563022 100644 --- a/public/docs/classes/Ollama.html +++ b/public/docs/classes/Ollama.html @@ -1,4 +1,4 @@ -Ollama | @themaximalist/llm.js
                                                                                                                                                                            @themaximalist/llm.js
                                                                                                                                                                              Preparing search index...

                                                                                                                                                                              Class Ollama

                                                                                                                                                                              Hierarchy (View Summary)

                                                                                                                                                                              Index

                                                                                                                                                                              Constructors

                                                                                                                                                                              constructor +Ollama | @themaximalist/llm.js
                                                                                                                                                                              @themaximalist/llm.js
                                                                                                                                                                                Preparing search index...

                                                                                                                                                                                Class Ollama

                                                                                                                                                                                Hierarchy (View Summary)

                                                                                                                                                                                Index

                                                                                                                                                                                Constructors

                                                                                                                                                                                Properties

                                                                                                                                                                                baseUrl: string
                                                                                                                                                                                extended: boolean
                                                                                                                                                                                json?: boolean
                                                                                                                                                                                max_thinking_tokens?: number
                                                                                                                                                                                max_tokens: number
                                                                                                                                                                                messages: Message[]
                                                                                                                                                                                model: string
                                                                                                                                                                                modelUsage: ModelUsage
                                                                                                                                                                                options: Options
                                                                                                                                                                                qualityFilter: QualityFilter
                                                                                                                                                                                service: string
                                                                                                                                                                                stream: boolean
                                                                                                                                                                                temperature?: number
                                                                                                                                                                                think: boolean
                                                                                                                                                                                tools?: Tool[]
                                                                                                                                                                                DEFAULT_BASE_URL: string = "http://localhost:11434"
                                                                                                                                                                                DEFAULT_MODEL: string = "gemma3:4b"
                                                                                                                                                                                isBearerAuth: boolean = false
                                                                                                                                                                                isLocal: boolean = true
                                                                                                                                                                                parsers: parsers = parsers
                                                                                                                                                                                service: string = "ollama"

                                                                                                                                                                                Accessors

                                                                                                                                                                                • get apiKey(): undefined | null | string

                                                                                                                                                                                  Returns undefined | null | string

                                                                                                                                                                                • get chatUrl(): string

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • get isLocal(): boolean

                                                                                                                                                                                  Returns boolean

                                                                                                                                                                                • get llmHeaders(): Record<string, string>

                                                                                                                                                                                  Returns Record<string, string>

                                                                                                                                                                                • get llmOptions(): Options

                                                                                                                                                                                  Returns Options

                                                                                                                                                                                • get modelsUrl(): string

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • get modelUrl(): string

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • get parsers(): Parsers

                                                                                                                                                                                  Returns Parsers

                                                                                                                                                                                Methods

                                                                                                                                                                                • Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • content: string

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • model: string

                                                                                                                                                                                  Returns Promise<any>

                                                                                                                                                                                • Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • data: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • chunk: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • data: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • chunk: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • usage: any

                                                                                                                                                                                  Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                                • Returns Promise<void>

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • data: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • content: string

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • content: string

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Returns Promise<boolean>

                                                                                                                                                                                +

                                                                                                                                                                                Constructors

                                                                                                                                                                                Properties

                                                                                                                                                                                baseUrl: string
                                                                                                                                                                                extended: boolean
                                                                                                                                                                                json?: boolean
                                                                                                                                                                                max_thinking_tokens?: number
                                                                                                                                                                                max_tokens: number
                                                                                                                                                                                messages: Message[]
                                                                                                                                                                                model: string
                                                                                                                                                                                modelUsage: ModelUsage
                                                                                                                                                                                options: Options
                                                                                                                                                                                qualityFilter: QualityFilter
                                                                                                                                                                                service: string
                                                                                                                                                                                stream: boolean
                                                                                                                                                                                temperature?: number
                                                                                                                                                                                think: boolean
                                                                                                                                                                                tools?: Tool[]
                                                                                                                                                                                DEFAULT_BASE_URL: string = "http://localhost:11434"
                                                                                                                                                                                DEFAULT_MODEL: string = "gemma3:4b"
                                                                                                                                                                                isBearerAuth: boolean = false
                                                                                                                                                                                isLocal: boolean = true
                                                                                                                                                                                MessageExtendedContentInputKey: string = "text"
                                                                                                                                                                                parsers: parsers = parsers
                                                                                                                                                                                service: string = "ollama"

                                                                                                                                                                                Accessors

                                                                                                                                                                                • get apiKey(): undefined | null | string

                                                                                                                                                                                  Returns undefined | null | string

                                                                                                                                                                                • get chatUrl(): string

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • get isLocal(): boolean

                                                                                                                                                                                  Returns boolean

                                                                                                                                                                                • get llmHeaders(): Record<string, string>

                                                                                                                                                                                  Returns Record<string, string>

                                                                                                                                                                                • get llmOptions(): Options

                                                                                                                                                                                  Returns Options

                                                                                                                                                                                • get modelsUrl(): string

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • get modelUrl(): string

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • get parsers(): Parsers

                                                                                                                                                                                  Returns Parsers

                                                                                                                                                                                Methods

                                                                                                                                                                                • Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • content: string

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • model: string

                                                                                                                                                                                  Returns Promise<any>

                                                                                                                                                                                • Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • content: any

                                                                                                                                                                                  Returns any[]

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • data: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • chunk: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • data: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • chunk: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • usage: any

                                                                                                                                                                                  Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                                • Returns Promise<void>

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • data: any

                                                                                                                                                                                  Returns string

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • content: string

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  • content: string

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Parameters

                                                                                                                                                                                  Returns void

                                                                                                                                                                                • Returns Promise<boolean>

                                                                                                                                                                                diff --git a/public/docs/classes/OpenAI.html b/public/docs/classes/OpenAI.html index 610a734..062ddb4 100644 --- a/public/docs/classes/OpenAI.html +++ b/public/docs/classes/OpenAI.html @@ -1,4 +1,4 @@ -OpenAI | @themaximalist/llm.js
                                                                                                                                                                                @themaximalist/llm.js
                                                                                                                                                                                  Preparing search index...

                                                                                                                                                                                  Class OpenAI

                                                                                                                                                                                  Hierarchy (View Summary)

                                                                                                                                                                                  Index

                                                                                                                                                                                  Constructors

                                                                                                                                                                                  constructor +OpenAI | @themaximalist/llm.js
                                                                                                                                                                                  @themaximalist/llm.js
                                                                                                                                                                                    Preparing search index...

                                                                                                                                                                                    Class OpenAI

                                                                                                                                                                                    Hierarchy (View Summary)

                                                                                                                                                                                    Index

                                                                                                                                                                                    Constructors

                                                                                                                                                                                    Properties

                                                                                                                                                                                    baseUrl: string
                                                                                                                                                                                    extended: boolean
                                                                                                                                                                                    json?: boolean
                                                                                                                                                                                    max_thinking_tokens?: number
                                                                                                                                                                                    max_tokens: number
                                                                                                                                                                                    messages: Message[]
                                                                                                                                                                                    model: string
                                                                                                                                                                                    modelUsage: ModelUsage
                                                                                                                                                                                    options: Options
                                                                                                                                                                                    qualityFilter: QualityFilter
                                                                                                                                                                                    service: string
                                                                                                                                                                                    stream: boolean
                                                                                                                                                                                    temperature?: number
                                                                                                                                                                                    think: boolean
                                                                                                                                                                                    tools?: Tool[]
                                                                                                                                                                                    DEFAULT_BASE_URL: string = "https://api.openai.com/v1"
                                                                                                                                                                                    DEFAULT_MODEL: string = "gpt-4o-mini"
                                                                                                                                                                                    isBearerAuth: boolean = true
                                                                                                                                                                                    isLocal: boolean = false
                                                                                                                                                                                    parsers: parsers = parsers
                                                                                                                                                                                    service: string = "openai"

                                                                                                                                                                                    Accessors

                                                                                                                                                                                    • get apiKey(): undefined | null | string

                                                                                                                                                                                      Returns undefined | null | string

                                                                                                                                                                                    • get chatUrl(): string

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • get isLocal(): boolean

                                                                                                                                                                                      Returns boolean

                                                                                                                                                                                    • get llmHeaders(): Record<string, string>

                                                                                                                                                                                      Returns Record<string, string>

                                                                                                                                                                                    • get llmOptions(): Options

                                                                                                                                                                                      Returns Options

                                                                                                                                                                                    • get modelsUrl(): string

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • get parsers(): Parsers

                                                                                                                                                                                      Returns Parsers

                                                                                                                                                                                    Methods

                                                                                                                                                                                    • Returns void

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • content: string

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • chunk: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • chunk: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                                    • Returns Promise<void>

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • content: string

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • content: string

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Returns Promise<boolean>

                                                                                                                                                                                    +

                                                                                                                                                                                    Constructors

                                                                                                                                                                                    Properties

                                                                                                                                                                                    baseUrl: string
                                                                                                                                                                                    extended: boolean
                                                                                                                                                                                    json?: boolean
                                                                                                                                                                                    max_thinking_tokens?: number
                                                                                                                                                                                    max_tokens: number
                                                                                                                                                                                    messages: Message[]
                                                                                                                                                                                    model: string
                                                                                                                                                                                    modelUsage: ModelUsage
                                                                                                                                                                                    options: Options
                                                                                                                                                                                    qualityFilter: QualityFilter
                                                                                                                                                                                    service: string
                                                                                                                                                                                    stream: boolean
                                                                                                                                                                                    temperature?: number
                                                                                                                                                                                    think: boolean
                                                                                                                                                                                    tools?: Tool[]
                                                                                                                                                                                    DEFAULT_BASE_URL: string = "https://api.openai.com/v1"
                                                                                                                                                                                    DEFAULT_MODEL: string = "gpt-4o-mini"
                                                                                                                                                                                    isBearerAuth: boolean = true
                                                                                                                                                                                    isLocal: boolean = false
                                                                                                                                                                                    MessageExtendedContentInputKey: string = "input_text"
                                                                                                                                                                                    parsers: parsers = parsers
                                                                                                                                                                                    service: string = "openai"

                                                                                                                                                                                    Accessors

                                                                                                                                                                                    • get apiKey(): undefined | null | string

                                                                                                                                                                                      Returns undefined | null | string

                                                                                                                                                                                    • get chatUrl(): string

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • get isLocal(): boolean

                                                                                                                                                                                      Returns boolean

                                                                                                                                                                                    • get llmHeaders(): Record<string, string>

                                                                                                                                                                                      Returns Record<string, string>

                                                                                                                                                                                    • get llmOptions(): Options

                                                                                                                                                                                      Returns Options

                                                                                                                                                                                    • get modelsUrl(): string

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • get parsers(): Parsers

                                                                                                                                                                                      Returns Parsers

                                                                                                                                                                                    Methods

                                                                                                                                                                                    • Returns void

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • content: string

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • content: any

                                                                                                                                                                                      Returns any[]

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • chunk: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • chunk: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                                    • Returns Promise<void>

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • data: any

                                                                                                                                                                                      Returns string

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • content: string

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      • content: string

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Parameters

                                                                                                                                                                                      Returns void

                                                                                                                                                                                    • Returns Promise<boolean>

                                                                                                                                                                                    diff --git a/public/docs/classes/xAI.html b/public/docs/classes/xAI.html index db9d9b6..9b39b1f 100644 --- a/public/docs/classes/xAI.html +++ b/public/docs/classes/xAI.html @@ -1,4 +1,4 @@ -xAI | @themaximalist/llm.js
                                                                                                                                                                                    @themaximalist/llm.js
                                                                                                                                                                                      Preparing search index...

                                                                                                                                                                                      Class xAI

                                                                                                                                                                                      Hierarchy (View Summary)

                                                                                                                                                                                      Index

                                                                                                                                                                                      Constructors

                                                                                                                                                                                      constructor +xAI | @themaximalist/llm.js
                                                                                                                                                                                      @themaximalist/llm.js
                                                                                                                                                                                        Preparing search index...

                                                                                                                                                                                        Class xAI

                                                                                                                                                                                        Hierarchy (View Summary)

                                                                                                                                                                                        Index

                                                                                                                                                                                        Constructors

                                                                                                                                                                                        Properties

                                                                                                                                                                                        baseUrl: string
                                                                                                                                                                                        extended: boolean
                                                                                                                                                                                        json?: boolean
                                                                                                                                                                                        max_thinking_tokens?: number
                                                                                                                                                                                        max_tokens: number
                                                                                                                                                                                        messages: Message[]
                                                                                                                                                                                        model: string
                                                                                                                                                                                        modelUsage: ModelUsage
                                                                                                                                                                                        options: Options
                                                                                                                                                                                        qualityFilter: QualityFilter
                                                                                                                                                                                        service: string
                                                                                                                                                                                        stream: boolean
                                                                                                                                                                                        temperature?: number
                                                                                                                                                                                        think: boolean
                                                                                                                                                                                        tools?: Tool[]
                                                                                                                                                                                        DEFAULT_BASE_URL: string = "https://api.x.ai/v1/"
                                                                                                                                                                                        DEFAULT_MODEL: string = "grok-3"
                                                                                                                                                                                        isBearerAuth: boolean = true
                                                                                                                                                                                        isLocal: boolean = false
                                                                                                                                                                                        KEY_REASONING_CONTENT: string = "reasoning_content"
                                                                                                                                                                                        parsers: parsers = parsers
                                                                                                                                                                                        service: string = "xai"

                                                                                                                                                                                        Accessors

                                                                                                                                                                                        • get apiKey(): undefined | null | string

                                                                                                                                                                                          Returns undefined | null | string

                                                                                                                                                                                        • get chatUrl(): string

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • get isLocal(): boolean

                                                                                                                                                                                          Returns boolean

                                                                                                                                                                                        • get llmHeaders(): Record<string, string>

                                                                                                                                                                                          Returns Record<string, string>

                                                                                                                                                                                        • get llmOptions(): Options

                                                                                                                                                                                          Returns Options

                                                                                                                                                                                        • get modelsUrl(): string

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • get parsers(): Parsers

                                                                                                                                                                                          Returns Parsers

                                                                                                                                                                                        Methods

                                                                                                                                                                                        • Returns void

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • content: string

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                                        • Returns Promise<void>

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • content: string

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • content: string

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Returns Promise<boolean>

                                                                                                                                                                                        +

                                                                                                                                                                                        Constructors

                                                                                                                                                                                        Properties

                                                                                                                                                                                        baseUrl: string
                                                                                                                                                                                        extended: boolean
                                                                                                                                                                                        json?: boolean
                                                                                                                                                                                        max_thinking_tokens?: number
                                                                                                                                                                                        max_tokens: number
                                                                                                                                                                                        messages: Message[]
                                                                                                                                                                                        model: string
                                                                                                                                                                                        modelUsage: ModelUsage
                                                                                                                                                                                        options: Options
                                                                                                                                                                                        qualityFilter: QualityFilter
                                                                                                                                                                                        service: string
                                                                                                                                                                                        stream: boolean
                                                                                                                                                                                        temperature?: number
                                                                                                                                                                                        think: boolean
                                                                                                                                                                                        tools?: Tool[]
                                                                                                                                                                                        DEFAULT_BASE_URL: string = "https://api.x.ai/v1/"
                                                                                                                                                                                        DEFAULT_MODEL: string = "grok-3"
                                                                                                                                                                                        isBearerAuth: boolean = true
                                                                                                                                                                                        isLocal: boolean = false
                                                                                                                                                                                        KEY_REASONING_CONTENT: string = "reasoning_content"
                                                                                                                                                                                        MessageExtendedContentInputKey: string = "text"
                                                                                                                                                                                        parsers: parsers = parsers
                                                                                                                                                                                        service: string = "xai"

                                                                                                                                                                                        Accessors

                                                                                                                                                                                        • get apiKey(): undefined | null | string

                                                                                                                                                                                          Returns undefined | null | string

                                                                                                                                                                                        • get chatUrl(): string

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • get isLocal(): boolean

                                                                                                                                                                                          Returns boolean

                                                                                                                                                                                        • get llmHeaders(): Record<string, string>

                                                                                                                                                                                          Returns Record<string, string>

                                                                                                                                                                                        • get llmOptions(): Options

                                                                                                                                                                                          Returns Options

                                                                                                                                                                                        • get modelsUrl(): string

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • get parsers(): Parsers

                                                                                                                                                                                          Returns Parsers

                                                                                                                                                                                        Methods

                                                                                                                                                                                        • Returns void

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • content: string

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          Returns { image_url: { detail: string; url: string }; type: string }

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • content: any

                                                                                                                                                                                          Returns any[]

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns null | { input_tokens: any; output_tokens: any }

                                                                                                                                                                                        • Returns Promise<void>

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • data: any

                                                                                                                                                                                          Returns string

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • content: string

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          • content: string

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Parameters

                                                                                                                                                                                          Returns void

                                                                                                                                                                                        • Returns Promise<boolean>

                                                                                                                                                                                        diff --git a/public/docs/hierarchy.html b/public/docs/hierarchy.html index a394b76..41d9226 100644 --- a/public/docs/hierarchy.html +++ b/public/docs/hierarchy.html @@ -1 +1 @@ -@themaximalist/llm.js
                                                                                                                                                                                        @themaximalist/llm.js
                                                                                                                                                                                          Preparing search index...
                                                                                                                                                                                          +@themaximalist/llm.js
                                                                                                                                                                                          @themaximalist/llm.js
                                                                                                                                                                                            Preparing search index...
                                                                                                                                                                                            diff --git a/public/docs/interfaces/AnthropicOptions.html b/public/docs/interfaces/AnthropicOptions.html index f2e2568..0431f26 100644 --- a/public/docs/interfaces/AnthropicOptions.html +++ b/public/docs/interfaces/AnthropicOptions.html @@ -1,4 +1,4 @@ -AnthropicOptions | @themaximalist/llm.js
                                                                                                                                                                                            @themaximalist/llm.js
                                                                                                                                                                                              Preparing search index...

                                                                                                                                                                                              Interface AnthropicOptions

                                                                                                                                                                                              interface AnthropicOptions {
                                                                                                                                                                                                  apiKey?: string;
                                                                                                                                                                                                  attachments?: Attachment[];
                                                                                                                                                                                                  baseUrl?: string;
                                                                                                                                                                                                  extended?: boolean;
                                                                                                                                                                                                  json?: boolean;
                                                                                                                                                                                                  max_thinking_tokens?: number;
                                                                                                                                                                                                  max_tokens?: number;
                                                                                                                                                                                                  messages?: Message[];
                                                                                                                                                                                                  model?: string;
                                                                                                                                                                                                  parser?: ParserResponse;
                                                                                                                                                                                                  qualityFilter?: QualityFilter;
                                                                                                                                                                                                  service?: string;
                                                                                                                                                                                                  stream?: boolean;
                                                                                                                                                                                                  temperature?: number;
                                                                                                                                                                                                  think?: boolean;
                                                                                                                                                                                                  thinking: { budget_tokens: number; type: "enabled" | "disabled" };
                                                                                                                                                                                                  tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                                                                                                                                              }

                                                                                                                                                                                              Hierarchy (View Summary)

                                                                                                                                                                                              Index

                                                                                                                                                                                              Properties

                                                                                                                                                                                              apiKey? +AnthropicOptions | @themaximalist/llm.js
                                                                                                                                                                                              @themaximalist/llm.js
                                                                                                                                                                                                Preparing search index...

                                                                                                                                                                                                Interface AnthropicOptions

                                                                                                                                                                                                interface AnthropicOptions {
                                                                                                                                                                                                    apiKey?: string;
                                                                                                                                                                                                    attachments?: Attachment[];
                                                                                                                                                                                                    baseUrl?: string;
                                                                                                                                                                                                    extended?: boolean;
                                                                                                                                                                                                    json?: boolean;
                                                                                                                                                                                                    max_thinking_tokens?: number;
                                                                                                                                                                                                    max_tokens?: number;
                                                                                                                                                                                                    messages?: Message[];
                                                                                                                                                                                                    model?: string;
                                                                                                                                                                                                    parser?: ParserResponse;
                                                                                                                                                                                                    qualityFilter?: QualityFilter;
                                                                                                                                                                                                    service?: string;
                                                                                                                                                                                                    stream?: boolean;
                                                                                                                                                                                                    temperature?: number;
                                                                                                                                                                                                    think?: boolean;
                                                                                                                                                                                                    thinking: { budget_tokens: number; type: "enabled" | "disabled" };
                                                                                                                                                                                                    tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                                                                                                                                                }

                                                                                                                                                                                                Hierarchy (View Summary)

                                                                                                                                                                                                Index

                                                                                                                                                                                                Properties

                                                                                                                                                                                                apiKey? attachments? baseUrl? extended? diff --git a/public/docs/interfaces/GoogleMessage.html b/public/docs/interfaces/GoogleMessage.html new file mode 100644 index 0000000..714d854 --- /dev/null +++ b/public/docs/interfaces/GoogleMessage.html @@ -0,0 +1,3 @@ +GoogleMessage | @themaximalist/llm.js
                                                                                                                                                                                                @themaximalist/llm.js
                                                                                                                                                                                                  Preparing search index...

                                                                                                                                                                                                  Interface GoogleMessage

                                                                                                                                                                                                  interface GoogleMessage {
                                                                                                                                                                                                      parts: (
                                                                                                                                                                                                          | { text?: string }
                                                                                                                                                                                                          | { inline_data?: { data: string; mime_type: string } }
                                                                                                                                                                                                      )[];
                                                                                                                                                                                                      role: "user" | "model";
                                                                                                                                                                                                  }
                                                                                                                                                                                                  Index

                                                                                                                                                                                                  Properties

                                                                                                                                                                                                  Properties

                                                                                                                                                                                                  parts: (
                                                                                                                                                                                                      | { text?: string }
                                                                                                                                                                                                      | { inline_data?: { data: string; mime_type: string } }
                                                                                                                                                                                                  )[]
                                                                                                                                                                                                  role: "user" | "model"
                                                                                                                                                                                                  diff --git a/public/docs/interfaces/GoogleOptions.html b/public/docs/interfaces/GoogleOptions.html index 1031efc..34dfe33 100644 --- a/public/docs/interfaces/GoogleOptions.html +++ b/public/docs/interfaces/GoogleOptions.html @@ -1,4 +1,4 @@ -GoogleOptions | @themaximalist/llm.js
                                                                                                                                                                                                  @themaximalist/llm.js
                                                                                                                                                                                                    Preparing search index...

                                                                                                                                                                                                    Interface GoogleOptions

                                                                                                                                                                                                    interface GoogleOptions {
                                                                                                                                                                                                        apiKey?: string;
                                                                                                                                                                                                        attachments?: Attachment[];
                                                                                                                                                                                                        baseUrl?: string;
                                                                                                                                                                                                        contents?: { parts: { text: string }[] }[];
                                                                                                                                                                                                        extended?: boolean;
                                                                                                                                                                                                        generationConfig?: {
                                                                                                                                                                                                            maxOutputTokens?: number;
                                                                                                                                                                                                            temperature?: number;
                                                                                                                                                                                                            thinkingConfig?: { includeThoughts: boolean };
                                                                                                                                                                                                        };
                                                                                                                                                                                                        json?: boolean;
                                                                                                                                                                                                        max_thinking_tokens?: number;
                                                                                                                                                                                                        max_tokens?: number;
                                                                                                                                                                                                        messages?: Message[];
                                                                                                                                                                                                        model?: string;
                                                                                                                                                                                                        parser?: ParserResponse;
                                                                                                                                                                                                        qualityFilter?: QualityFilter;
                                                                                                                                                                                                        service?: string;
                                                                                                                                                                                                        stream?: boolean;
                                                                                                                                                                                                        system_instruction?: { parts: { text: string }[] };
                                                                                                                                                                                                        temperature?: number;
                                                                                                                                                                                                        think?: boolean;
                                                                                                                                                                                                        tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                                                                                                                                                    }

                                                                                                                                                                                                    Hierarchy (View Summary)

                                                                                                                                                                                                    Index

                                                                                                                                                                                                    Properties

                                                                                                                                                                                                    apiKey? +GoogleOptions | @themaximalist/llm.js
                                                                                                                                                                                                    @themaximalist/llm.js
                                                                                                                                                                                                      Preparing search index...

                                                                                                                                                                                                      Interface GoogleOptions

                                                                                                                                                                                                      interface GoogleOptions {
                                                                                                                                                                                                          apiKey?: string;
                                                                                                                                                                                                          attachments?: Attachment[];
                                                                                                                                                                                                          baseUrl?: string;
                                                                                                                                                                                                          contents?: GoogleMessage[];
                                                                                                                                                                                                          extended?: boolean;
                                                                                                                                                                                                          generationConfig?: {
                                                                                                                                                                                                              maxOutputTokens?: number;
                                                                                                                                                                                                              temperature?: number;
                                                                                                                                                                                                              thinkingConfig?: { includeThoughts: boolean };
                                                                                                                                                                                                          };
                                                                                                                                                                                                          json?: boolean;
                                                                                                                                                                                                          max_thinking_tokens?: number;
                                                                                                                                                                                                          max_tokens?: number;
                                                                                                                                                                                                          messages?: Message[];
                                                                                                                                                                                                          model?: string;
                                                                                                                                                                                                          parser?: ParserResponse;
                                                                                                                                                                                                          qualityFilter?: QualityFilter;
                                                                                                                                                                                                          service?: string;
                                                                                                                                                                                                          stream?: boolean;
                                                                                                                                                                                                          system_instruction?: { parts: { text: string }[] };
                                                                                                                                                                                                          temperature?: number;
                                                                                                                                                                                                          think?: boolean;
                                                                                                                                                                                                          tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                                                                                                                                                      }

                                                                                                                                                                                                      Hierarchy (View Summary)

                                                                                                                                                                                                      Index

                                                                                                                                                                                                      Properties

                                                                                                                                                                                                      Properties

                                                                                                                                                                                                      apiKey?: string

                                                                                                                                                                                                      API Key for the service, Usage.local services do not need an API key

                                                                                                                                                                                                      attachments?: Attachment[]

                                                                                                                                                                                                      Attachments to send to the model

                                                                                                                                                                                                      baseUrl?: string

                                                                                                                                                                                                      Base URL for the service

                                                                                                                                                                                                      -
                                                                                                                                                                                                      contents?: { parts: { text: string }[] }[]
                                                                                                                                                                                                      extended?: boolean

                                                                                                                                                                                                      Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                                                                                                                                                      -
                                                                                                                                                                                                      generationConfig?: {
                                                                                                                                                                                                          maxOutputTokens?: number;
                                                                                                                                                                                                          temperature?: number;
                                                                                                                                                                                                          thinkingConfig?: { includeThoughts: boolean };
                                                                                                                                                                                                      }
                                                                                                                                                                                                      json?: boolean

                                                                                                                                                                                                      Enables JSON mode in LLM if available and parses output with parsers.json

                                                                                                                                                                                                      +
                                                                                                                                                                                                      contents?: GoogleMessage[]
                                                                                                                                                                                                      extended?: boolean

                                                                                                                                                                                                      Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                                                                                                                                                      +
                                                                                                                                                                                                      generationConfig?: {
                                                                                                                                                                                                          maxOutputTokens?: number;
                                                                                                                                                                                                          temperature?: number;
                                                                                                                                                                                                          thinkingConfig?: { includeThoughts: boolean };
                                                                                                                                                                                                      }
                                                                                                                                                                                                      json?: boolean

                                                                                                                                                                                                      Enables JSON mode in LLM if available and parses output with parsers.json

                                                                                                                                                                                                      max_thinking_tokens?: number

                                                                                                                                                                                                      Maximum number of tokens to use when thinking is enabled

                                                                                                                                                                                                      max_tokens?: number

                                                                                                                                                                                                      Maximum number of tokens to generate

                                                                                                                                                                                                      messages?: Message[]

                                                                                                                                                                                                      Messages to send to the model

                                                                                                                                                                                                      @@ -30,7 +30,7 @@
                                                                                                                                                                                                      qualityFilter?: QualityFilter

                                                                                                                                                                                                      Quality filter when dealing with model usage

                                                                                                                                                                                                      service?: string

                                                                                                                                                                                                      Service to use, defaults to Ollama

                                                                                                                                                                                                      stream?: boolean

                                                                                                                                                                                                      Enables streaming mode

                                                                                                                                                                                                      -
                                                                                                                                                                                                      system_instruction?: { parts: { text: string }[] }
                                                                                                                                                                                                      temperature?: number

                                                                                                                                                                                                      Temperature for the model

                                                                                                                                                                                                      +
                                                                                                                                                                                                      system_instruction?: { parts: { text: string }[] }
                                                                                                                                                                                                      temperature?: number

                                                                                                                                                                                                      Temperature for the model

                                                                                                                                                                                                      think?: boolean

                                                                                                                                                                                                      Enables thinking mode

                                                                                                                                                                                                      tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                                                                                                                                                      Tools available for the model to use, will enable Options.extended

                                                                                                                                                                                                      diff --git a/public/docs/interfaces/GoogleTool.html b/public/docs/interfaces/GoogleTool.html index 649c558..782642f 100644 --- a/public/docs/interfaces/GoogleTool.html +++ b/public/docs/interfaces/GoogleTool.html @@ -1,4 +1,4 @@ -GoogleTool | @themaximalist/llm.js
                                                                                                                                                                                                      @themaximalist/llm.js
                                                                                                                                                                                                        Preparing search index...

                                                                                                                                                                                                        Interface GoogleTool

                                                                                                                                                                                                        interface GoogleTool {
                                                                                                                                                                                                            description: string;
                                                                                                                                                                                                            name: string;
                                                                                                                                                                                                            parameters: Record<string, any>;
                                                                                                                                                                                                        }
                                                                                                                                                                                                        Index

                                                                                                                                                                                                        Properties

                                                                                                                                                                                                        description +GoogleTool | @themaximalist/llm.js
                                                                                                                                                                                                        @themaximalist/llm.js
                                                                                                                                                                                                          Preparing search index...

                                                                                                                                                                                                          Interface GoogleTool

                                                                                                                                                                                                          interface GoogleTool {
                                                                                                                                                                                                              description: string;
                                                                                                                                                                                                              name: string;
                                                                                                                                                                                                              parameters: Record<string, any>;
                                                                                                                                                                                                          }
                                                                                                                                                                                                          Index

                                                                                                                                                                                                          Properties

                                                                                                                                                                                                          description: string
                                                                                                                                                                                                          name: string
                                                                                                                                                                                                          parameters: Record<string, any>
                                                                                                                                                                                                          +

                                                                                                                                                                                                          Properties

                                                                                                                                                                                                          description: string
                                                                                                                                                                                                          name: string
                                                                                                                                                                                                          parameters: Record<string, any>
                                                                                                                                                                                                          diff --git a/public/docs/interfaces/OllamaOptions.html b/public/docs/interfaces/OllamaOptions.html index decf5e6..7090247 100644 --- a/public/docs/interfaces/OllamaOptions.html +++ b/public/docs/interfaces/OllamaOptions.html @@ -1,4 +1,4 @@ -OllamaOptions | @themaximalist/llm.js
                                                                                                                                                                                                          @themaximalist/llm.js
                                                                                                                                                                                                            Preparing search index...

                                                                                                                                                                                                            Interface OllamaOptions

                                                                                                                                                                                                            interface OllamaOptions {
                                                                                                                                                                                                                apiKey?: string;
                                                                                                                                                                                                                attachments?: Attachment[];
                                                                                                                                                                                                                baseUrl?: string;
                                                                                                                                                                                                                extended?: boolean;
                                                                                                                                                                                                                json?: boolean;
                                                                                                                                                                                                                max_thinking_tokens?: number;
                                                                                                                                                                                                                max_tokens?: number;
                                                                                                                                                                                                                messages?: Message[];
                                                                                                                                                                                                                model?: string;
                                                                                                                                                                                                                options?: { num_predict?: number };
                                                                                                                                                                                                                parser?: ParserResponse;
                                                                                                                                                                                                                qualityFilter?: QualityFilter;
                                                                                                                                                                                                                service?: string;
                                                                                                                                                                                                                stream?: boolean;
                                                                                                                                                                                                                temperature?: number;
                                                                                                                                                                                                                think?: boolean;
                                                                                                                                                                                                                tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                                                                                                                                                            }

                                                                                                                                                                                                            Hierarchy (View Summary)

                                                                                                                                                                                                            Index

                                                                                                                                                                                                            Properties

                                                                                                                                                                                                            apiKey? +OllamaOptions | @themaximalist/llm.js
                                                                                                                                                                                                            @themaximalist/llm.js
                                                                                                                                                                                                              Preparing search index...

                                                                                                                                                                                                              Interface OllamaOptions

                                                                                                                                                                                                              interface OllamaOptions {
                                                                                                                                                                                                                  apiKey?: string;
                                                                                                                                                                                                                  attachments?: Attachment[];
                                                                                                                                                                                                                  baseUrl?: string;
                                                                                                                                                                                                                  extended?: boolean;
                                                                                                                                                                                                                  json?: boolean;
                                                                                                                                                                                                                  max_thinking_tokens?: number;
                                                                                                                                                                                                                  max_tokens?: number;
                                                                                                                                                                                                                  messages?: Message[];
                                                                                                                                                                                                                  model?: string;
                                                                                                                                                                                                                  options?: { num_predict?: number };
                                                                                                                                                                                                                  parser?: ParserResponse;
                                                                                                                                                                                                                  qualityFilter?: QualityFilter;
                                                                                                                                                                                                                  service?: string;
                                                                                                                                                                                                                  stream?: boolean;
                                                                                                                                                                                                                  temperature?: number;
                                                                                                                                                                                                                  think?: boolean;
                                                                                                                                                                                                                  tools?: Tool[] | WrappedTool[] | OpenAITool[];
                                                                                                                                                                                                              }

                                                                                                                                                                                                              Hierarchy (View Summary)

                                                                                                                                                                                                              Index

                                                                                                                                                                                                              Properties

                                                                                                                                                                                                              apiKey? attachments? baseUrl? extended? @@ -24,11 +24,11 @@
                                                                                                                                                                                                              max_tokens?: number

                                                                                                                                                                                                              Maximum number of tokens to generate

                                                                                                                                                                                                              messages?: Message[]

                                                                                                                                                                                                              Messages to send to the model

                                                                                                                                                                                                              model?: string

                                                                                                                                                                                                              Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                                                                                                                                                              -
                                                                                                                                                                                                              options?: { num_predict?: number }

                                                                                                                                                                                                              Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                                                                                                                                                              +
                                                                                                                                                                                                              options?: { num_predict?: number }

                                                                                                                                                                                                              Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                                                                                                                                                              qualityFilter?: QualityFilter

                                                                                                                                                                                                              Quality filter when dealing with model usage

                                                                                                                                                                                                              service?: string

                                                                                                                                                                                                              Service to use, defaults to Ollama

                                                                                                                                                                                                              stream?: boolean

                                                                                                                                                                                                              Enables streaming mode

                                                                                                                                                                                                              temperature?: number

                                                                                                                                                                                                              Temperature for the model

                                                                                                                                                                                                              think?: boolean

                                                                                                                                                                                                              Enables thinking mode

                                                                                                                                                                                                              -
                                                                                                                                                                                                              tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                                                                                                                                                              Tools available for the model to use, will enable Options.extended

                                                                                                                                                                                                              +
                                                                                                                                                                                                              tools?: Tool[] | WrappedTool[] | OpenAITool[]

                                                                                                                                                                                                              Tools available for the model to use, will enable Options.extended

                                                                                                                                                                                                              diff --git a/public/docs/interfaces/OpenAIOptions.html b/public/docs/interfaces/OpenAIOptions.html index e0271fd..a877946 100644 --- a/public/docs/interfaces/OpenAIOptions.html +++ b/public/docs/interfaces/OpenAIOptions.html @@ -1,4 +1,4 @@ -OpenAIOptions | @themaximalist/llm.js
                                                                                                                                                                                                              @themaximalist/llm.js
                                                                                                                                                                                                                Preparing search index...

                                                                                                                                                                                                                Interface OpenAIOptions

                                                                                                                                                                                                                interface OpenAIOptions {
                                                                                                                                                                                                                    apiKey?: string;
                                                                                                                                                                                                                    attachments?: Attachment[];
                                                                                                                                                                                                                    baseUrl?: string;
                                                                                                                                                                                                                    extended?: boolean;
                                                                                                                                                                                                                    input?: string | Message[];
                                                                                                                                                                                                                    json?: boolean;
                                                                                                                                                                                                                    max_output_tokens?: number;
                                                                                                                                                                                                                    max_thinking_tokens?: number;
                                                                                                                                                                                                                    max_tokens?: number;
                                                                                                                                                                                                                    messages?: Message[];
                                                                                                                                                                                                                    model?: string;
                                                                                                                                                                                                                    parser?: ParserResponse;
                                                                                                                                                                                                                    qualityFilter?: QualityFilter;
                                                                                                                                                                                                                    reasoning?: {
                                                                                                                                                                                                                        effort: "low" | "medium" | "high";
                                                                                                                                                                                                                        summary: "auto" | "concise" | "detailed";
                                                                                                                                                                                                                    };
                                                                                                                                                                                                                    service?: string;
                                                                                                                                                                                                                    stream?: boolean;
                                                                                                                                                                                                                    temperature?: number;
                                                                                                                                                                                                                    think?: boolean;
                                                                                                                                                                                                                    tools?: Tool[]
                                                                                                                                                                                                                    | WrappedTool[]
                                                                                                                                                                                                                    | OpenAITool[];
                                                                                                                                                                                                                }

                                                                                                                                                                                                                Hierarchy (View Summary)

                                                                                                                                                                                                                Index

                                                                                                                                                                                                                Properties

                                                                                                                                                                                                                apiKey? +OpenAIOptions | @themaximalist/llm.js
                                                                                                                                                                                                                @themaximalist/llm.js
                                                                                                                                                                                                                  Preparing search index...

                                                                                                                                                                                                                  Interface OpenAIOptions

                                                                                                                                                                                                                  interface OpenAIOptions {
                                                                                                                                                                                                                      apiKey?: string;
                                                                                                                                                                                                                      attachments?: Attachment[];
                                                                                                                                                                                                                      baseUrl?: string;
                                                                                                                                                                                                                      extended?: boolean;
                                                                                                                                                                                                                      input?: string | Message[];
                                                                                                                                                                                                                      json?: boolean;
                                                                                                                                                                                                                      max_output_tokens?: number;
                                                                                                                                                                                                                      max_thinking_tokens?: number;
                                                                                                                                                                                                                      max_tokens?: number;
                                                                                                                                                                                                                      messages?: Message[];
                                                                                                                                                                                                                      model?: string;
                                                                                                                                                                                                                      parser?: ParserResponse;
                                                                                                                                                                                                                      qualityFilter?: QualityFilter;
                                                                                                                                                                                                                      reasoning?: {
                                                                                                                                                                                                                          effort: "low" | "medium" | "high";
                                                                                                                                                                                                                          summary: "auto" | "concise" | "detailed";
                                                                                                                                                                                                                      };
                                                                                                                                                                                                                      service?: string;
                                                                                                                                                                                                                      stream?: boolean;
                                                                                                                                                                                                                      temperature?: number;
                                                                                                                                                                                                                      think?: boolean;
                                                                                                                                                                                                                      tools?: Tool[]
                                                                                                                                                                                                                      | WrappedTool[]
                                                                                                                                                                                                                      | OpenAITool[];
                                                                                                                                                                                                                  }

                                                                                                                                                                                                                  Hierarchy (View Summary)

                                                                                                                                                                                                                  Index

                                                                                                                                                                                                                  Properties

                                                                                                                                                                                                                  apiKey? attachments? baseUrl? extended? @@ -21,14 +21,14 @@
                                                                                                                                                                                                                  attachments?: Attachment[]

                                                                                                                                                                                                                  Attachments to send to the model

                                                                                                                                                                                                                  baseUrl?: string

                                                                                                                                                                                                                  Base URL for the service

                                                                                                                                                                                                                  extended?: boolean

                                                                                                                                                                                                                  Returns an extended response with Response, PartialStreamResponse and StreamResponse types

                                                                                                                                                                                                                  -
                                                                                                                                                                                                                  input?: string | Message[]
                                                                                                                                                                                                                  json?: boolean

                                                                                                                                                                                                                  Enables JSON mode in LLM if available and parses output with parsers.json

                                                                                                                                                                                                                  -
                                                                                                                                                                                                                  max_output_tokens?: number
                                                                                                                                                                                                                  max_thinking_tokens?: number

                                                                                                                                                                                                                  Maximum number of tokens to use when thinking is enabled

                                                                                                                                                                                                                  +
                                                                                                                                                                                                                  input?: string | Message[]
                                                                                                                                                                                                                  json?: boolean

                                                                                                                                                                                                                  Enables JSON mode in LLM if available and parses output with parsers.json

                                                                                                                                                                                                                  +
                                                                                                                                                                                                                  max_output_tokens?: number
                                                                                                                                                                                                                  max_thinking_tokens?: number

                                                                                                                                                                                                                  Maximum number of tokens to use when thinking is enabled

                                                                                                                                                                                                                  max_tokens?: number

                                                                                                                                                                                                                  Maximum number of tokens to generate

                                                                                                                                                                                                                  messages?: Message[]

                                                                                                                                                                                                                  Messages to send to the model

                                                                                                                                                                                                                  model?: string

                                                                                                                                                                                                                  Model to use, defaults to Ollama.DEFAULT_MODEL model

                                                                                                                                                                                                                  Custom parser function, defaults include parsers.json, parsers.xml, parsers.codeBlock and parsers.markdown

                                                                                                                                                                                                                  qualityFilter?: QualityFilter

                                                                                                                                                                                                                  Quality filter when dealing with model usage

                                                                                                                                                                                                                  -
                                                                                                                                                                                                                  reasoning?: {
                                                                                                                                                                                                                      effort: "low" | "medium" | "high";
                                                                                                                                                                                                                      summary: "auto" | "concise" | "detailed";
                                                                                                                                                                                                                  }
                                                                                                                                                                                                                  service?: string

                                                                                                                                                                                                                  Service to use, defaults to Ollama

                                                                                                                                                                                                                  +
                                                                                                                                                                                                                  reasoning?: {
                                                                                                                                                                                                                      effort: "low" | "medium" | "high";
                                                                                                                                                                                                                      summary: "auto" | "concise" | "detailed";
                                                                                                                                                                                                                  }
                                                                                                                                                                                                                  service?: string

                                                                                                                                                                                                                  Service to use, defaults to Ollama

                                                                                                                                                                                                                  stream?: boolean

                                                                                                                                                                                                                  Enables streaming mode

                                                                                                                                                                                                                  temperature?: number

                                                                                                                                                                                                                  Temperature for the model

                                                                                                                                                                                                                  think?: boolean

                                                                                                                                                                                                                  Enables thinking mode

                                                                                                                                                                                                                  diff --git a/public/docs/interfaces/OpenAITool.html b/public/docs/interfaces/OpenAITool.html index 70551a6..59d12dd 100644 --- a/public/docs/interfaces/OpenAITool.html +++ b/public/docs/interfaces/OpenAITool.html @@ -1,6 +1,6 @@ -OpenAITool | @themaximalist/llm.js
                                                                                                                                                                                                                  @themaximalist/llm.js
                                                                                                                                                                                                                    Preparing search index...

                                                                                                                                                                                                                    Interface OpenAITool

                                                                                                                                                                                                                    interface OpenAITool {
                                                                                                                                                                                                                        description: string;
                                                                                                                                                                                                                        name: string;
                                                                                                                                                                                                                        parameters: any;
                                                                                                                                                                                                                        strict: boolean;
                                                                                                                                                                                                                        type: "function";
                                                                                                                                                                                                                    }
                                                                                                                                                                                                                    Index

                                                                                                                                                                                                                    Properties

                                                                                                                                                                                                                    description +OpenAITool | @themaximalist/llm.js
                                                                                                                                                                                                                    @themaximalist/llm.js
                                                                                                                                                                                                                      Preparing search index...

                                                                                                                                                                                                                      Interface OpenAITool

                                                                                                                                                                                                                      interface OpenAITool {
                                                                                                                                                                                                                          description: string;
                                                                                                                                                                                                                          name: string;
                                                                                                                                                                                                                          parameters: any;
                                                                                                                                                                                                                          strict: boolean;
                                                                                                                                                                                                                          type: "function";
                                                                                                                                                                                                                      }
                                                                                                                                                                                                                      Index

                                                                                                                                                                                                                      Properties

                                                                                                                                                                                                                      description: string
                                                                                                                                                                                                                      name: string
                                                                                                                                                                                                                      parameters: any
                                                                                                                                                                                                                      strict: boolean
                                                                                                                                                                                                                      type: "function"
                                                                                                                                                                                                                      +

                                                                                                                                                                                                                      Properties

                                                                                                                                                                                                                      description: string
                                                                                                                                                                                                                      name: string
                                                                                                                                                                                                                      parameters: any
                                                                                                                                                                                                                      strict: boolean
                                                                                                                                                                                                                      type: "function"
                                                                                                                                                                                                                      diff --git a/public/docs/modules.html b/public/docs/modules.html index 27119a0..5fb3b98 100644 --- a/public/docs/modules.html +++ b/public/docs/modules.html @@ -1 +1 @@ -@themaximalist/llm.js
                                                                                                                                                                                                                      @themaximalist/llm.js
                                                                                                                                                                                                                        Preparing search index...
                                                                                                                                                                                                                        +@themaximalist/llm.js
                                                                                                                                                                                                                        @themaximalist/llm.js
                                                                                                                                                                                                                          Preparing search index...
                                                                                                                                                                                                                          diff --git a/public/docs/types/APIv1Options.html b/public/docs/types/APIv1Options.html index 097d26f..9527078 100644 --- a/public/docs/types/APIv1Options.html +++ b/public/docs/types/APIv1Options.html @@ -1 +1 @@ -APIv1Options | @themaximalist/llm.js
                                                                                                                                                                                                                          @themaximalist/llm.js
                                                                                                                                                                                                                            Preparing search index...

                                                                                                                                                                                                                            Type Alias APIv1Options

                                                                                                                                                                                                                            APIv1Options: Options & {
                                                                                                                                                                                                                                reasoning_effort?: "low" | "medium" | "high";
                                                                                                                                                                                                                                stream_options?: { include_usage?: boolean };
                                                                                                                                                                                                                            }
                                                                                                                                                                                                                            +APIv1Options | @themaximalist/llm.js
                                                                                                                                                                                                                            @themaximalist/llm.js
                                                                                                                                                                                                                              Preparing search index...

                                                                                                                                                                                                                              Type Alias APIv1Options

                                                                                                                                                                                                                              APIv1Options: Options & {
                                                                                                                                                                                                                                  reasoning_effort?: "low" | "medium" | "high";
                                                                                                                                                                                                                                  stream_options?: { include_usage?: boolean };
                                                                                                                                                                                                                              }
                                                                                                                                                                                                                              diff --git a/public/index.html b/public/index.html index ede1acb..633b3c5 100644 --- a/public/index.html +++ b/public/index.html @@ -120,6 +120,7 @@

                                                                                                                                                                                                                              LLM.js — Universal LLM Interface

                                                                                                                                                                                                                            • Thinking
                                                                                                                                                                                                                            • Tools
                                                                                                                                                                                                                            • Parsers
                                                                                                                                                                                                                            • +
                                                                                                                                                                                                                            • Attachments
                                                                                                                                                                                                                            • Token Usage
                                                                                                                                                                                                                            • Cost Usage
                                                                                                                                                                                                                            • System @@ -178,6 +179,8 @@

                                                                                                                                                                                                                              LLM.js

                                                                                                                                                                                                                              thinking, tools, parsers)
                                                                                                                                                                                                                            • Thinking with reasoning models
                                                                                                                                                                                                                            • Tools to call custom functions
                                                                                                                                                                                                                            • +
                                                                                                                                                                                                                            • Attachments to send images, documents, +and other files
                                                                                                                                                                                                                            • Parsers including JSON, XML, codeBlock
                                                                                                                                                                                                                            • Token Usage input and output tokens on @@ -421,114 +424,149 @@

                                                                                                                                                                                                                              Parsers

                                                                                                                                                                                                                              const completed = await response.complete(); // { content: { color: "blue" } }
                                                                                                                                                                                                                            • +

                                                                                                                                                                                                                              Attachments

                                                                                                                                                                                                                              +

                                                                                                                                                                                                                              Send images, documents, and other files alongside your prompts using +attachments:

                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              // Image from base64 data
                                                                                                                                                                                                                              +const data = fs.readFileSync("file.jpg", "base64");
                                                                                                                                                                                                                              +const image = LLM.Attachment.fromJPEG(data);
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +const response = await LLM("What's in this image?", { attachments: [image] });
                                                                                                                                                                                                                              +

                                                                                                                                                                                                                              Create attachments from different sources:

                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              // From base64 data
                                                                                                                                                                                                                              +const jpeg = LLM.Attachment.fromJPEG(base64Data);
                                                                                                                                                                                                                              +const pdf = LLM.Attachment.fromPDF(base64Data);
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +// From image URL
                                                                                                                                                                                                                              +const image = LLM.Attachment.fromImageURL("https://example.com/image.jpg");
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +// Use with chat
                                                                                                                                                                                                                              +const llm = new LLM();
                                                                                                                                                                                                                              +await llm.chat("Describe this image", { attachments: [jpeg] });
                                                                                                                                                                                                                              +await llm.chat("What color is the main object?"); // References previous image
                                                                                                                                                                                                                              +

                                                                                                                                                                                                                              Attachments work seamlessly with streaming:

                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              const response = await LLM("Analyze this document", { 
                                                                                                                                                                                                                              +  attachments: [pdf],
                                                                                                                                                                                                                              +  stream: true 
                                                                                                                                                                                                                              +});
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +for await (const chunk of response) {
                                                                                                                                                                                                                              +  process.stdout.write(chunk);
                                                                                                                                                                                                                              +}
                                                                                                                                                                                                                              +

                                                                                                                                                                                                                              Note: Attachment support varies by service. Images +are widely supported, Documents (PDF) and Images from URLs are supported +by some.

                                                                                                                                                                                                                              Token Usage

                                                                                                                                                                                                                              Every extended request automatically tracks input and output tokens:

                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              const response = await LLM("explain quantum physics", { extended: true });
                                                                                                                                                                                                                              -console.log(response.usage.input_tokens);  // 3
                                                                                                                                                                                                                              -console.log(response.usage.output_tokens); // 127
                                                                                                                                                                                                                              -console.log(response.usage.total_tokens);  // 130
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              const response = await LLM("explain quantum physics", { extended: true });
                                                                                                                                                                                                                              +console.log(response.usage.input_tokens);  // 3
                                                                                                                                                                                                                              +console.log(response.usage.output_tokens); // 127
                                                                                                                                                                                                                              +console.log(response.usage.total_tokens);  // 130

                                                                                                                                                                                                                              Token counting works with all features including streaming, thinking, and tools.

                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              const response = await LLM("explain quantum physics", { 
                                                                                                                                                                                                                              -  stream: true,
                                                                                                                                                                                                                              -  extended: true,
                                                                                                                                                                                                                              -});
                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              -for await (const chunk of response.stream) {
                                                                                                                                                                                                                              -  // ...
                                                                                                                                                                                                                              -}
                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              -const complete = await response.complete();
                                                                                                                                                                                                                              -// {
                                                                                                                                                                                                                              -//   usage: {
                                                                                                                                                                                                                              -//     input_tokens: 3,
                                                                                                                                                                                                                              -//     output_tokens: 127,
                                                                                                                                                                                                                              -//     total_tokens: 130,
                                                                                                                                                                                                                              -//     ...
                                                                                                                                                                                                                              -//   }
                                                                                                                                                                                                                              -// }
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              const response = await LLM("explain quantum physics", { 
                                                                                                                                                                                                                              +  stream: true,
                                                                                                                                                                                                                              +  extended: true,
                                                                                                                                                                                                                              +});
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +for await (const chunk of response.stream) {
                                                                                                                                                                                                                              +  // ...
                                                                                                                                                                                                                              +}
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +const complete = await response.complete();
                                                                                                                                                                                                                              +// {
                                                                                                                                                                                                                              +//   usage: {
                                                                                                                                                                                                                              +//     input_tokens: 3,
                                                                                                                                                                                                                              +//     output_tokens: 127,
                                                                                                                                                                                                                              +//     total_tokens: 130,
                                                                                                                                                                                                                              +//     ...
                                                                                                                                                                                                                              +//   }
                                                                                                                                                                                                                              +// }

                                                                                                                                                                                                                              Cost Usage

                                                                                                                                                                                                                              Every extended request automatically tracks cost based on current model pricing:

                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              const response = await LLM("write a haiku", { 
                                                                                                                                                                                                                              -  service: "openai",
                                                                                                                                                                                                                              -  model: "gpt-4o-mini",
                                                                                                                                                                                                                              -  extended: true 
                                                                                                                                                                                                                              -});
                                                                                                                                                                                                                              -// {
                                                                                                                                                                                                                              -//   usage: {
                                                                                                                                                                                                                              -//     input_cost: 0.000045,
                                                                                                                                                                                                                              -//     output_cost: 0.000234,
                                                                                                                                                                                                                              -//     total_cost: 0.000279,
                                                                                                                                                                                                                              -//     ...
                                                                                                                                                                                                                              -//   }
                                                                                                                                                                                                                              -// }
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              const response = await LLM("write a haiku", { 
                                                                                                                                                                                                                              +  service: "openai",
                                                                                                                                                                                                                              +  model: "gpt-4o-mini",
                                                                                                                                                                                                                              +  extended: true 
                                                                                                                                                                                                                              +});
                                                                                                                                                                                                                              +// {
                                                                                                                                                                                                                              +//   usage: {
                                                                                                                                                                                                                              +//     input_cost: 0.000045,
                                                                                                                                                                                                                              +//     output_cost: 0.000234,
                                                                                                                                                                                                                              +//     total_cost: 0.000279,
                                                                                                                                                                                                                              +//     ...
                                                                                                                                                                                                                              +//   }
                                                                                                                                                                                                                              +// }

                                                                                                                                                                                                                              Cost usage works with all features including streaming, thinking, and tools.

                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              const response = await LLM("explain quantum physics", { 
                                                                                                                                                                                                                              -  stream: true,
                                                                                                                                                                                                                              -  extended: true,
                                                                                                                                                                                                                              -});
                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              -for await (const chunk of response.stream) {
                                                                                                                                                                                                                              -  // ...
                                                                                                                                                                                                                              -}
                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              -const complete = await response.complete();
                                                                                                                                                                                                                              -// {
                                                                                                                                                                                                                              -//   usage: {
                                                                                                                                                                                                                              -//     input_cost: 0.000045,
                                                                                                                                                                                                                              -//     output_cost: 0.000234,
                                                                                                                                                                                                                              -//     total_cost: 0.000279,
                                                                                                                                                                                                                              -//     ...
                                                                                                                                                                                                                              -//   }
                                                                                                                                                                                                                              -// }
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              const response = await LLM("explain quantum physics", { 
                                                                                                                                                                                                                              +  stream: true,
                                                                                                                                                                                                                              +  extended: true,
                                                                                                                                                                                                                              +});
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +for await (const chunk of response.stream) {
                                                                                                                                                                                                                              +  // ...
                                                                                                                                                                                                                              +}
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +const complete = await response.complete();
                                                                                                                                                                                                                              +// {
                                                                                                                                                                                                                              +//   usage: {
                                                                                                                                                                                                                              +//     input_cost: 0.000045,
                                                                                                                                                                                                                              +//     output_cost: 0.000234,
                                                                                                                                                                                                                              +//     total_cost: 0.000279,
                                                                                                                                                                                                                              +//     ...
                                                                                                                                                                                                                              +//   }
                                                                                                                                                                                                                              +// }

                                                                                                                                                                                                                              Local models (like Ollama) show $0 cost and are marked as local: true.

                                                                                                                                                                                                                              System Prompts

                                                                                                                                                                                                                               Tell models to specialize in specific tasks using llm.system(input).

                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              const llm = new LLM();
                                                                                                                                                                                                                              -llm.system("You are a friendly chat bot.");
                                                                                                                                                                                                                              -await llm.chat("what's the color of the sky in hex value?"); // Response: sky blue
                                                                                                                                                                                                                              -await llm.chat("what about at night time?"); // Response: darker value (uses previous context)
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              const llm = new LLM();
                                                                                                                                                                                                                              +llm.system("You are a friendly chat bot.");
                                                                                                                                                                                                                              +await llm.chat("what's the color of the sky in hex value?"); // Response: sky blue
                                                                                                                                                                                                                              +await llm.chat("what about at night time?"); // Response: darker value (uses previous context)

                                                                                                                                                                                                                              Message History

                                                                                                                                                                                                                              LLM.js supports simple string prompts, but also full message history:

                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              await LLM("hello"); // hi
                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              -await LLM([
                                                                                                                                                                                                                              -    { role: "user", content: "remember the secret codeword is blue" },
                                                                                                                                                                                                                              -    { role: "assistant", content: "OK I will remember" },
                                                                                                                                                                                                                              -    { role: "user", content: "what is the secret codeword I just told you?" },
                                                                                                                                                                                                                              -]); // blue
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              await LLM("hello"); // hi
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              +await LLM([
                                                                                                                                                                                                                              +    { role: "user", content: "remember the secret codeword is blue" },
                                                                                                                                                                                                                              +    { role: "assistant", content: "OK I will remember" },
                                                                                                                                                                                                                              +    { role: "user", content: "what is the secret codeword I just told you?" },
                                                                                                                                                                                                                              +]); // blue

                                                                                                                                                                                                                              Options

                                                                                                                                                                                                                              LLM.js provides comprehensive configuration options for all scenarios:

                                                                                                                                                                                                                              -
                                                                                                                                                                                                                              const llm = new LLM(input, {
                                                                                                                                                                                                                              -  service: "openai",        // LLM service provider
                                                                                                                                                                                                                              -  apiKey: "sk-123"          // apiKey
                                                                                                                                                                                                                              -  model: "gpt-4o",          // Specific model
                                                                                                                                                                                                                              -  max_tokens: 1000,         // Maximum response length
                                                                                                                                                                                                                              -  temperature: 0.7,         // "Creativity" (0-2)
                                                                                                                                                                                                                              -  stream: true,             // Enable streaming
                                                                                                                                                                                                                              -  extended: true,           // Extended responses with metadata
                                                                                                                                                                                                                              -  messages: [],             // message history
                                                                                                                                                                                                                              -  think: true,              // Enable thinking mode
                                                                                                                                                                                                                              -  parser: LLM.parsers.json, // Content parser
                                                                                                                                                                                                                              -  tools: [...],             // Available tools
                                                                                                                                                                                                                              -  max_thinking_tokens: 500, // Max tokens for thinking
                                                                                                                                                                                                                              -});
                                                                                                                                                                                                                              +
                                                                                                                                                                                                                              const llm = new LLM(input, {
                                                                                                                                                                                                                              +  service: "openai",        // LLM service provider
                                                                                                                                                                                                                               +  apiKey: "sk-123",        // API key
                                                                                                                                                                                                                              +  model: "gpt-4o",          // Specific model
                                                                                                                                                                                                                              +  max_tokens: 1000,         // Maximum response length
                                                                                                                                                                                                                              +  temperature: 0.7,         // "Creativity" (0-2)
                                                                                                                                                                                                                              +  stream: true,             // Enable streaming
                                                                                                                                                                                                                              +  extended: true,           // Extended responses with metadata
                                                                                                                                                                                                                              +  messages: [],             // message history
                                                                                                                                                                                                                              +  think: true,              // Enable thinking mode
                                                                                                                                                                                                                              +  parser: LLM.parsers.json, // Content parser
                                                                                                                                                                                                                              +  tools: [...],             // Available tools
                                                                                                                                                                                                                              +  max_thinking_tokens: 500, // Max tokens for thinking
                                                                                                                                                                                                                              +});

                                                                                                                                                                                                                              Key Options:

                                                                                                                                                                                                                               • service: Provider
@@ -566,39 +604,39 @@

                                                                                                                                                                                                                                Models

                                                                                                                                                                                                                                Switch Models

                                                                                                                                                                                                                                LLM.js supports most popular Large Language Models across both local and remote providers:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                // Defaults to Ollama (local)
                                                                                                                                                                                                                                -await LLM("the color of the sky is");
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// OpenAI
                                                                                                                                                                                                                                -await LLM("the color of the sky is", { model: "gpt-4o-mini", service: "openai" });
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// Anthropic
                                                                                                                                                                                                                                -await LLM("the color of the sky is", { model: "claude-3-5-sonnet-latest", service: "anthropic" });
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// Google
                                                                                                                                                                                                                                -await LLM("the color of the sky is", { model: "gemini-1.5-pro", service: "google" });
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// xAI
                                                                                                                                                                                                                                -await LLM("the color of the sky is", { service: "xai", model: "grok-beta" });
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// DeepSeek with thinking
                                                                                                                                                                                                                                -await LLM("solve this puzzle", { service: "deepseek", model: "deepseek-reasoner", think: true });
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// Ollama (local)
                                                                                                                                                                                                                                -await LLM("the color of the sky is", { model: "llama3.2:3b", service: "ollama" });
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                // Defaults to Ollama (local)
                                                                                                                                                                                                                                +await LLM("the color of the sky is");
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// OpenAI
                                                                                                                                                                                                                                +await LLM("the color of the sky is", { model: "gpt-4o-mini", service: "openai" });
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// Anthropic
                                                                                                                                                                                                                                +await LLM("the color of the sky is", { model: "claude-3-5-sonnet-latest", service: "anthropic" });
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// Google
                                                                                                                                                                                                                                +await LLM("the color of the sky is", { model: "gemini-1.5-pro", service: "google" });
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// xAI
                                                                                                                                                                                                                                +await LLM("the color of the sky is", { service: "xai", model: "grok-beta" });
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// DeepSeek with thinking
                                                                                                                                                                                                                                +await LLM("solve this puzzle", { service: "deepseek", model: "deepseek-reasoner", think: true });
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// Ollama (local)
                                                                                                                                                                                                                                +await LLM("the color of the sky is", { model: "llama3.2:3b", service: "ollama" });

                                                                                                                                                                                                                                All features work the same whether local or remote, with automatic token and cost tracking. Local models track token usage, but cost is always $0.

                                                                                                                                                                                                                                Fetch Latest Models

                                                                                                                                                                                                                                Get the latest available models directly from providers:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                const llm = new LLM({ service: "openai" });
                                                                                                                                                                                                                                -const models = await llm.fetchModels();
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -console.log(models.length); // 50+ models
                                                                                                                                                                                                                                -console.log(models[0]);     // { name: "gpt-4o", created: Date, service: "openai", ... }
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                const llm = new LLM({ service: "openai" });
                                                                                                                                                                                                                                +const models = await llm.fetchModels();
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +console.log(models.length); // 50+ models
                                                                                                                                                                                                                                +console.log(models[0]);     // { name: "gpt-4o", created: Date, service: "openai", ... }

                                                                                                                                                                                                                                Here’s an example of the models available with the Quality Filter.

                                                                                                                                                                                                                                Model Features and Cost

                                                                                                                                                                                                                                This provides real-time cost per input/output token, and model features like context window, tool support, thinking support, and more!

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                import { ModelUsage } from "@themaximalist/llm.js";
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// Get all cached models
                                                                                                                                                                                                                                -const allModels = ModelUsage.getAll();
                                                                                                                                                                                                                                -console.log(allModels.length); // 100+
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// Refresh from latest sources  
                                                                                                                                                                                                                                -const refreshedModels = await ModelUsage.refresh();
                                                                                                                                                                                                                                -console.log(refreshedModels.length); // Even more models
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// Get specific model info
                                                                                                                                                                                                                                -const gpt4 = ModelUsage.get("openai", "gpt-4o");
                                                                                                                                                                                                                                -console.log(gpt4.input_cost_per_token);  // 0.0000025
                                                                                                                                                                                                                                -console.log(gpt4.max_input_tokens);      // 128000
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                import { ModelUsage } from "@themaximalist/llm.js";
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// Get all cached models
                                                                                                                                                                                                                                +const allModels = ModelUsage.getAll();
                                                                                                                                                                                                                                +console.log(allModels.length); // 100+
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// Refresh from latest sources  
                                                                                                                                                                                                                                +const refreshedModels = await ModelUsage.refresh();
                                                                                                                                                                                                                                +console.log(refreshedModels.length); // Even more models
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// Get specific model info
                                                                                                                                                                                                                                +const gpt4 = ModelUsage.get("openai", "gpt-4o");
                                                                                                                                                                                                                                +console.log(gpt4.input_cost_per_token);  // 0.0000025
                                                                                                                                                                                                                                +console.log(gpt4.max_input_tokens);      // 128000

When using the extended option, token usage and cost are automatically added to responses.

                                                                                                                                                                                                                                Quality Models

                                                                                                                                                                                                                                @@ -994,99 +1032,99 @@

                                                                                                                                                                                                                                Quality Models

The Quality Models filter removes embeddings, TTS, instruct, audio, image, and similar non-chat models, so that only the best LLM models are presented.

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                const llm = new LLM({ service: "anthropic" });
                                                                                                                                                                                                                                -const qualityModels = await llm.getQualityModels();
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -for (const model of qualityModels) {
                                                                                                                                                                                                                                -  console.log(model.model);                 // "claude-3-5-sonnet-latest"
                                                                                                                                                                                                                                -  console.log(model.input_cost_per_token);  // 0.000003
                                                                                                                                                                                                                                -  console.log(model.output_cost_per_token); // 0.000015
                                                                                                                                                                                                                                -  console.log(model.max_tokens);            // 8192
                                                                                                                                                                                                                                -  console.log(model.created);               // 2024-10-22T00:00:00.000Z
                                                                                                                                                                                                                                -}
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                const llm = new LLM({ service: "anthropic" });
                                                                                                                                                                                                                                +const qualityModels = await llm.getQualityModels();
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +for (const model of qualityModels) {
                                                                                                                                                                                                                                +  console.log(model.model);                 // "claude-3-5-sonnet-latest"
                                                                                                                                                                                                                                +  console.log(model.input_cost_per_token);  // 0.000003
                                                                                                                                                                                                                                +  console.log(model.output_cost_per_token); // 0.000015
                                                                                                                                                                                                                                +  console.log(model.max_tokens);            // 8192
                                                                                                                                                                                                                                +  console.log(model.created);               // 2024-10-22T00:00:00.000Z
                                                                                                                                                                                                                                +}

                                                                                                                                                                                                                                Custom Models

If the refreshed model list doesn’t have a model you need, or you have a custom model, you can add custom token and pricing information.

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                import { ModelUsage } from "@themaximalist/llm.js";
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -ModelUsage.addCustom({
                                                                                                                                                                                                                                -  model: "my-custom-gpt",
                                                                                                                                                                                                                                -  service: "openai", 
                                                                                                                                                                                                                                -  input_cost_per_token: 0.00001,
                                                                                                                                                                                                                                -  output_cost_per_token: 0.00003,
                                                                                                                                                                                                                                -  max_tokens: 4096
                                                                                                                                                                                                                                -});
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -// Now use it like any other model
                                                                                                                                                                                                                                -const response = await LLM("hello", { 
                                                                                                                                                                                                                                -  service: "openai", 
                                                                                                                                                                                                                                -  model: "my-custom-gpt",
                                                                                                                                                                                                                                -  extended: true 
                                                                                                                                                                                                                                -});
                                                                                                                                                                                                                                -console.log(response.usage.total_cost); // Uses your custom pricing
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                import { ModelUsage } from "@themaximalist/llm.js";
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +ModelUsage.addCustom({
                                                                                                                                                                                                                                +  model: "my-custom-gpt",
                                                                                                                                                                                                                                +  service: "openai", 
                                                                                                                                                                                                                                +  input_cost_per_token: 0.00001,
                                                                                                                                                                                                                                +  output_cost_per_token: 0.00003,
                                                                                                                                                                                                                                +  max_tokens: 4096
                                                                                                                                                                                                                                +});
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +// Now use it like any other model
                                                                                                                                                                                                                                +const response = await LLM("hello", { 
                                                                                                                                                                                                                                +  service: "openai", 
                                                                                                                                                                                                                                +  model: "my-custom-gpt",
                                                                                                                                                                                                                                +  extended: true 
                                                                                                                                                                                                                                +});
                                                                                                                                                                                                                                +console.log(response.usage.total_cost); // Uses your custom pricing

                                                                                                                                                                                                                                Custom Services

                                                                                                                                                                                                                                You can add custom services to LLM.js by passing a custom object:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                const llm = new LLM({
                                                                                                                                                                                                                                -    service: "together",
                                                                                                                                                                                                                                -    baseUrl: "https://api.together.xyz/v1",
                                                                                                                                                                                                                                -    model: "meta-llama/Llama-3-70b-chat-hf",
                                                                                                                                                                                                                                -    apiKey,
                                                                                                                                                                                                                                -});
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                const llm = new LLM({
                                                                                                                                                                                                                                +    service: "together",
                                                                                                                                                                                                                                +    baseUrl: "https://api.together.xyz/v1",
                                                                                                                                                                                                                                +    model: "meta-llama/Llama-3-70b-chat-hf",
                                                                                                                                                                                                                                +    apiKey,
                                                                                                                                                                                                                                +});

                                                                                                                                                                                                                                You can also create a custom service by extending the LLM.APIv1 class:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                class Together extends LLM.APIv1 {
                                                                                                                                                                                                                                -    static readonly service: ServiceName = "together";
                                                                                                                                                                                                                                -    static DEFAULT_BASE_URL: string = "https://api.together.xyz/v1";
                                                                                                                                                                                                                                -    static DEFAULT_MODEL: string = "meta-llama/Llama-3-70b-chat-hf";
                                                                                                                                                                                                                                -}
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -const llm = new Together();
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                class Together extends LLM.APIv1 {
                                                                                                                                                                                                                                +    static readonly service: ServiceName = "together";
                                                                                                                                                                                                                                +    static DEFAULT_BASE_URL: string = "https://api.together.xyz/v1";
                                                                                                                                                                                                                                +    static DEFAULT_MODEL: string = "meta-llama/Llama-3-70b-chat-hf";
                                                                                                                                                                                                                                +}
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +const llm = new Together();

                                                                                                                                                                                                                                You can even register the custom services with LLM.js to make them available globally:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                
                                                                                                                                                                                                                                -LLM.register(Together);
                                                                                                                                                                                                                                -const llm = new LLM({ service: "together" });
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                
                                                                                                                                                                                                                                +LLM.register(Together);
                                                                                                                                                                                                                                +const llm = new LLM({ service: "together" });

                                                                                                                                                                                                                                To implement a fully custom model, subclass LLM and implement the parse methods:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                class Custom extends LLM {
                                                                                                                                                                                                                                -    static readonly service: ServiceName = "secretAGI";
                                                                                                                                                                                                                                -    static DEFAULT_BASE_URL: string = "http://localhost:9876";
                                                                                                                                                                                                                                -    static DEFAULT_MODEL: string = "gpt-999";
                                                                                                                                                                                                                                -    static isLocal: boolean = false; // don't track pricing
                                                                                                                                                                                                                                -    static isBearerAuth: boolean = false;
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -    get chatUrl() { return `${this.baseUrl}/chat` }
                                                                                                                                                                                                                                -    get modelsUrl() { return `${this.baseUrl}/models` }
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -    parseContent(data: any): string { ... }
                                                                                                                                                                                                                                -    parseTools(data: any): ToolCall[] { ... }
                                                                                                                                                                                                                                -    parseThinking(data: any): string { ... }
                                                                                                                                                                                                                                -    parseModel(model: any): Model { ... }
                                                                                                                                                                                                                                -    parseOptions(options: Options): Options { ... }
                                                                                                                                                                                                                                -    parseTokenUsage(usage: any): InputOutputTokens | null { ... }
                                                                                                                                                                                                                                -    parseUsage(tokenUsage: InputOutputTokens): Usage { ... }
                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                -    // streaming methods
                                                                                                                                                                                                                                -    parseToolsChunk(chunk: any): ToolCall[] { return this.parseTools(chunk) }
                                                                                                                                                                                                                                -    parseContentChunk(chunk: any): string { return this.parseContent(chunk) }
                                                                                                                                                                                                                                -    parseThinkingChunk(chunk: any): string { return this.parseThinking(chunk) }
                                                                                                                                                                                                                                -}
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                class Custom extends LLM {
                                                                                                                                                                                                                                +    static readonly service: ServiceName = "secretAGI";
                                                                                                                                                                                                                                +    static DEFAULT_BASE_URL: string = "http://localhost:9876";
                                                                                                                                                                                                                                +    static DEFAULT_MODEL: string = "gpt-999";
                                                                                                                                                                                                                                +    static isLocal: boolean = false; // don't track pricing
                                                                                                                                                                                                                                +    static isBearerAuth: boolean = false;
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +    get chatUrl() { return `${this.baseUrl}/chat` }
                                                                                                                                                                                                                                +    get modelsUrl() { return `${this.baseUrl}/models` }
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +    parseContent(data: any): string { ... }
                                                                                                                                                                                                                                +    parseTools(data: any): ToolCall[] { ... }
                                                                                                                                                                                                                                +    parseThinking(data: any): string { ... }
                                                                                                                                                                                                                                +    parseModel(model: any): Model { ... }
                                                                                                                                                                                                                                +    parseOptions(options: Options): Options { ... }
                                                                                                                                                                                                                                +    parseTokenUsage(usage: any): InputOutputTokens | null { ... }
                                                                                                                                                                                                                                +    parseUsage(tokenUsage: InputOutputTokens): Usage { ... }
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                +    // streaming methods
                                                                                                                                                                                                                                +    parseToolsChunk(chunk: any): ToolCall[] { return this.parseTools(chunk) }
                                                                                                                                                                                                                                +    parseContentChunk(chunk: any): string { return this.parseContent(chunk) }
                                                                                                                                                                                                                                +    parseThinkingChunk(chunk: any): string { return this.parseThinking(chunk) }
                                                                                                                                                                                                                                +}

                                                                                                                                                                                                                                Connection Verification

                                                                                                                                                                                                                                Test your setup and API keys with built-in connection verification:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                const llm = new LLM({ service: "openai" });
                                                                                                                                                                                                                                -const isConnected = await llm.verifyConnection();
                                                                                                                                                                                                                                -console.log(isConnected); // true if API key and service work
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                const llm = new LLM({ service: "openai" });
                                                                                                                                                                                                                                +const isConnected = await llm.verifyConnection();
                                                                                                                                                                                                                                +console.log(isConnected); // true if API key and service work

                                                                                                                                                                                                                                This is a light check that doesn’t perform a LLM chat response. For non-local services it detects if it can fetch models. For local services it detects if an instance is up and running.

                                                                                                                                                                                                                                @@ -1101,10 +1139,10 @@

                                                                                                                                                                                                                                Debug

                                                                                                                                                                                                                                with the llm.js namespace.

                                                                                                                                                                                                                                View debug logs by setting the DEBUG environment variable:

                                                                                                                                                                                                                                -
                                                                                                                                                                                                                                > DEBUG=llm.js* node your-script.js
                                                                                                                                                                                                                                -# debug logs
                                                                                                                                                                                                                                -blue
                                                                                                                                                                                                                                +
                                                                                                                                                                                                                                > DEBUG=llm.js* node your-script.js
                                                                                                                                                                                                                                +# debug logs
                                                                                                                                                                                                                                +blue

                                                                                                                                                                                                                                Projects

                                                                                                                                                                                                                                LLM.js is currently used in production by:

                                                                                                                                                                                                                                  @@ -1123,8 +1161,8 @@

                                                                                                                                                                                                                                  Projects

                                                                                                                                                                                                                                Changelog

                                                                                                                                                                                                                                  -
                                                                                                                                                                                                                                • 06/14/2025 — v1.0.1 — Better model features support and -tags
                                                                                                                                                                                                                                • +
                                                                                                                                                                                                                                • 06/22/2025 — v1.0.1 — Attachment support (images and +PDF), Better model features support and tags
                                                                                                                                                                                                                                • 06/13/2025 — v1.0.0 — Added thinking mode, extended responses, token/cost usage, model management, TypeScript. Removed Together, Perplexity, Llamafile
                                                                                                                                                                                                                                • diff --git a/src/index.ts b/src/index.ts index c31a09f..4ba1a04 100644 --- a/src/index.ts +++ b/src/index.ts @@ -49,7 +49,7 @@ export type { Input, Message, Attachment }; export type { AnthropicOptions } from "./anthropic"; export type { OllamaOptions } from "./ollama"; export type { OpenAIOptions, OpenAITool } from "./openai"; -export type { GoogleOptions, GoogleTool } from "./google"; +export type { GoogleOptions, GoogleTool, GoogleMessage } from "./google"; export type { GroqOptions } from "./groq"; export type { APIv1Options } from "./APIv1"; export type { AttachmentType } from "./Attachment"; From 7b46e444b72e0a5498fd3f0f28d7361f860c35f3 Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Mon, 23 Jun 2025 17:24:40 -0500 Subject: [PATCH 21/23] injecting base64 asset data for browser tests --- test/attachments.test.ts | 7 +++---- test/globalSetup.ts | 16 ++++++++++++++++ test/vitest.config.ts | 1 + vite.config.ts | 1 + 4 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 test/globalSetup.ts diff --git a/test/attachments.test.ts b/test/attachments.test.ts index a83a6d3..4f68996 100644 --- a/test/attachments.test.ts +++ b/test/attachments.test.ts @@ -1,13 +1,12 @@ -import { describe, it, expect, beforeEach, afterEach } from "vitest"; -import { readFileSync } from "fs"; +import { describe, it, expect, beforeEach, afterEach, inject } from "vitest"; import LLM from "../src/index.js"; import currentService from "./currentService.js"; -const taco = readFileSync("./test/taco.jpg", "base64"); +const taco = inject("taco"); const tacoAttachment = LLM.Attachment.fromJPEG(taco); -const dummy = readFileSync("./test/dummy.pdf", "base64"); +const dummy = inject("dummy"); const dummyAttachment = LLM.Attachment.fromPDF(dummy); const xAI_DEFAULT 
= LLM.xAI.DEFAULT_MODEL; diff --git a/test/globalSetup.ts b/test/globalSetup.ts new file mode 100644 index 0000000..502871c --- /dev/null +++ b/test/globalSetup.ts @@ -0,0 +1,16 @@ +import type { TestProject } from 'vitest/node' +import { readFileSync } from "fs"; + +export default function setup(project: TestProject) { + const taco = readFileSync("./test/taco.jpg", "base64"); + const dummy = readFileSync("./test/dummy.pdf", "base64"); + project.provide("taco", taco); + project.provide("dummy", dummy); +} + +declare module "vitest" { + export interface ProvidedContext { + taco: string; + dummy: string; + } + } \ No newline at end of file diff --git a/test/vitest.config.ts b/test/vitest.config.ts index a6b1b98..9f29ca7 100644 --- a/test/vitest.config.ts +++ b/test/vitest.config.ts @@ -4,6 +4,7 @@ export default defineConfig({ test: { testTimeout: 60000, slowTestThreshold: 15000, + globalSetup: "./test/globalSetup.ts", setupFiles: ["./test/setup.ts"], bail: 1, retry: 5, diff --git a/vite.config.ts b/vite.config.ts index 81edaa0..dfaf479 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -60,6 +60,7 @@ export default defineConfig({ test: { testTimeout: 60000, name: 'browser', + globalSetup: "./test/globalSetup.ts", setupFiles: ["./test/setup.ts"], bail: 1, retry: 5, From 9c7396cd86cfc5c96c0fd964a6bd6ce10e8b0b9b Mon Sep 17 00:00:00 2001 From: Brad Jasper Date: Tue, 24 Jun 2025 21:48:42 -0500 Subject: [PATCH 22/23] update --- data/model_prices_and_context_window.json | 60 +++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/data/model_prices_and_context_window.json b/data/model_prices_and_context_window.json index df91855..d441115 100644 --- a/data/model_prices_and_context_window.json +++ b/data/model_prices_and_context_window.json @@ -2156,6 +2156,66 @@ "supports_tool_choice": true, "supports_native_streaming": true }, + "azure/o3-pro": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 
2e-05, + "output_cost_per_token": 8e-05, + "input_cost_per_token_batches": 1e-05, + "output_cost_per_token_batches": 4e-05, + "litellm_provider": "azure", + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_vision": true, + "supports_prompt_caching": false, + "supports_response_schema": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "azure/o3-pro-2025-06-10": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 2e-05, + "output_cost_per_token": 8e-05, + "input_cost_per_token_batches": 1e-05, + "output_cost_per_token_batches": 4e-05, + "litellm_provider": "azure", + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_vision": true, + "supports_prompt_caching": false, + "supports_response_schema": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, "azure/o3": { "max_tokens": 100000, "max_input_tokens": 200000, From 265c4517098b0f96d7382bf96bc63c3bacca4ed2 Mon Sep 17 00:00:00 2001 From: Chris McCormick Date: Mon, 25 Aug 2025 22:07:15 +0800 Subject: [PATCH 23/23] Add OpenRouter as a supported provider. Fixes #17. 
--- README.md | 6 +++++- src/LLM.types.ts | 4 ++-- src/index.ts | 11 +++++++---- src/openrouter.ts | 11 +++++++++++ 4 files changed, 25 insertions(+), 7 deletions(-) create mode 100644 src/openrouter.ts diff --git a/README.md b/README.md index 0d8ee4c..e6cd75b 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ It works in Node.js and the browser and supports all the important features for await LLM("the color of the sky is"); // blue ``` -* Same interface for hundreds of LLMs (`OpenAI`, `Google`, `Anthropic`, `Groq`, `Ollama`, `xAI`, `DeepSeek`) +* Same interface for hundreds of LLMs (`OpenAI`, `Google`, `Anthropic`, `Groq`, `Ollama`, `xAI`, `DeepSeek`, `OpenRouter`) * [Chat](#chat) using message history * [Stream](#streaming) responses instantly (including with thinking, tools, parsers) * [Thinking](#thinking) with reasoning models @@ -66,6 +66,7 @@ export GOOGLE_API_KEY=... export GROQ_API_KEY=... export DEEPSEEK_API_KEY=... export XAI_API_KEY=... +export OPENROUTER_API_KEY=... ``` They can also be included as an option `{apiKey: "sk-123"}`. 
@@ -517,6 +518,9 @@ await LLM("the color of the sky is", { service: "xai", model: "grok-beta" }); // DeepSeek with thinking await LLM("solve this puzzle", { service: "deepseek", model: "deepseek-reasoner", think: true }); +// OpenRouter +await LLM("the color of the sky is", { service: "openrouter", model: "anthropic/claude-3-haiku" }); + // Ollama (local) await LLM("the color of the sky is", { model: "llama3.2:3b", service: "ollama" }); ``` diff --git a/src/LLM.types.ts b/src/LLM.types.ts index 5d956d5..148051b 100644 --- a/src/LLM.types.ts +++ b/src/LLM.types.ts @@ -5,7 +5,7 @@ import type Attachment from "./Attachment"; /** * @category Options */ -export type ServiceName = "anthropic" | "ollama" | "openai" | "google" | "xai" | "groq" | "deepseek" | string; +export type ServiceName = "anthropic" | "ollama" | "openai" | "google" | "xai" | "groq" | "deepseek" | "openrouter" | string; /** * @category Tools @@ -204,4 +204,4 @@ export type QualityFilter = { allowUnknown?: boolean; allowSimilar?: boolean; topModels?: boolean; -} \ No newline at end of file +} diff --git a/src/index.ts b/src/index.ts index 4ba1a04..08189ec 100644 --- a/src/index.ts +++ b/src/index.ts @@ -7,6 +7,7 @@ import Google from "./google"; import xAI from "./xai"; import Groq from "./groq"; import DeepSeek from "./deepseek"; +import OpenRouter from "./openrouter"; import LLM from "./LLM"; import APIv1 from "./APIv1"; @@ -28,12 +29,12 @@ export type { ModelUsageType, ModelTag } from "./ModelUsage"; /** * @category LLMs */ -export type LLMServices = Anthropic | Ollama | OpenAI | Google | xAI | Groq | DeepSeek | APIv1; +export type LLMServices = Anthropic | Ollama | OpenAI | Google | xAI | Groq | DeepSeek | OpenRouter | APIv1; /** * @category LLMs */ -export type { LLM, Anthropic, Ollama, OpenAI, Google, xAI, Groq, DeepSeek, APIv1 }; +export type { LLM, Anthropic, Ollama, OpenAI, Google, xAI, Groq, DeepSeek, OpenRouter, APIv1 }; /** * @category Parsers @@ -54,7 +55,7 @@ export type { GroqOptions } 
from "./groq"; export type { APIv1Options } from "./APIv1"; export type { AttachmentType } from "./Attachment"; -let SERVICES = [Anthropic, Ollama, OpenAI, Google, xAI, Groq, DeepSeek]; +let SERVICES = [Anthropic, Ollama, OpenAI, Google, xAI, Groq, DeepSeek, OpenRouter]; /** * @category LLMs @@ -80,6 +81,7 @@ export interface LLMInterface { xAI: typeof xAI; Groq: typeof Groq; DeepSeek: typeof DeepSeek; + OpenRouter: typeof OpenRouter; APIv1: typeof APIv1; Attachment: typeof Attachment; @@ -133,6 +135,7 @@ LLMWrapper.Google = Google; LLMWrapper.xAI = xAI; LLMWrapper.Groq = Groq; LLMWrapper.DeepSeek = DeepSeek; +LLMWrapper.OpenRouter = OpenRouter; LLMWrapper.APIv1 = APIv1; LLMWrapper.LLM = LLM; @@ -146,4 +149,4 @@ LLMWrapper.unregister = (LLMClass: typeof LLM) => { SERVICES = SERVICES.filter(Service => Service !== LLMClass); }; -export default LLMWrapper; \ No newline at end of file +export default LLMWrapper; diff --git a/src/openrouter.ts b/src/openrouter.ts new file mode 100644 index 0000000..431e0d0 --- /dev/null +++ b/src/openrouter.ts @@ -0,0 +1,11 @@ +import APIv1 from "./APIv1"; +import type { ServiceName } from "./LLM.types"; + +/** + * @category LLMs + */ +export default class OpenRouter extends APIv1 { + static readonly service: ServiceName = "openrouter"; + static DEFAULT_BASE_URL: string = "https://openrouter.ai/api/v1"; + static DEFAULT_MODEL: string = "google/gemini-2.5-flash"; +}