diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index ca1d94e..0ee8c01 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.2.17"
+ ".": "0.3.0"
}
diff --git a/.stats.yml b/.stats.yml
index bd77bef..c93a005 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 106
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-f59f1c7d33001d60b5190f68aa49eacec90f05dbe694620b8916152c3922051d.yml
-openapi_spec_hash: 804edd2e834493906dc430145402be3b
-config_hash: de16e52db65de71ac35adcdb665a74f5
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-1c5bc84a8d003bb986b2cc2a7bb6a5232ab0514469a7f67ddbf58c06de248a03.yml
+openapi_spec_hash: ad377dd848973abb656ca35f5fdc93df
+config_hash: e67fd054e95c1e82f78f4b834e96bb65
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f3992c6..e754af9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,27 @@
# Changelog
+## 0.3.0 (2025-08-13)
+
+Full Changelog: [v0.2.17...v0.3.0](https://github.com/llamastack/llama-stack-client-typescript/compare/v0.2.17...v0.3.0)
+
+### Features
+
+* **api:** update via SDK Studio ([f35b8f2](https://github.com/llamastack/llama-stack-client-typescript/commit/f35b8f26d1a3783b1768f0fdae7ded0c64123d5d))
+* **api:** update via SDK Studio ([7cf0786](https://github.com/llamastack/llama-stack-client-typescript/commit/7cf0786e73360062d59f0c329383cc419bb14c3b))
+* **api:** update via SDK Studio ([add2c59](https://github.com/llamastack/llama-stack-client-typescript/commit/add2c593baa8bb791cafbb129d5e0b5669aa6e98))
+* **api:** update via SDK Studio ([e402b8d](https://github.com/llamastack/llama-stack-client-typescript/commit/e402b8ddccbea7197c186b7ceab2c346045962c1))
+* **api:** update via SDK Studio ([2185339](https://github.com/llamastack/llama-stack-client-typescript/commit/21853392ad1c0a9c829405eeb0ece6504dd1be70))
+* **api:** update via SDK Studio ([4a43a8f](https://github.com/llamastack/llama-stack-client-typescript/commit/4a43a8ff19d7e1ea175ed528dc562e6308ced95f))
+* **api:** update via SDK Studio ([fa60dbf](https://github.com/llamastack/llama-stack-client-typescript/commit/fa60dbfdc63e9ed638946efca0d4d02954909df9))
+* **api:** update via SDK Studio ([afda79e](https://github.com/llamastack/llama-stack-client-typescript/commit/afda79e99224af2f2c78d9190d21d46ebc59dc94))
+
+
+### Chores
+
+* **internal:** move publish config ([e8eff1a](https://github.com/llamastack/llama-stack-client-typescript/commit/e8eff1a0c8853dff423d10f9701b26c24b8e904b))
+* **internal:** update comment in script ([4fff100](https://github.com/llamastack/llama-stack-client-typescript/commit/4fff100b23c168e986583d4b1a98303dbf0eba95))
+* update @stainless-api/prism-cli to v5.15.0 ([12e10b3](https://github.com/llamastack/llama-stack-client-typescript/commit/12e10b3610a405534328d835dc30d9b522cb1d31))
+
## 0.2.17 (2025-08-06)
Full Changelog: [v0.2.15...v0.2.17](https://github.com/llamastack/llama-stack-client-typescript/compare/v0.2.15...v0.2.17)
diff --git a/api.md b/api.md
index df16b33..1d0dcf0 100644
--- a/api.md
+++ b/api.md
@@ -87,7 +87,7 @@ Methods:
- client.responses.create({ ...params }) -> ResponseObject
- client.responses.retrieve(responseId) -> ResponseObject
-- client.responses.list({ ...params }) -> ResponseListResponse
+- client.responses.list({ ...params }) -> ResponseListResponsesOpenAICursorPage
## InputItems
@@ -262,7 +262,7 @@ Methods:
- client.chat.completions.create({ ...params }) -> CompletionCreateResponse
- client.chat.completions.retrieve(completionId) -> CompletionRetrieveResponse
-- client.chat.completions.list({ ...params }) -> CompletionListResponse
+- client.chat.completions.list({ ...params }) -> CompletionListResponsesOpenAICursorPage
# Completions
@@ -315,7 +315,7 @@ Methods:
- client.vectorStores.create({ ...params }) -> VectorStore
- client.vectorStores.retrieve(vectorStoreId) -> VectorStore
- client.vectorStores.update(vectorStoreId, { ...params }) -> VectorStore
-- client.vectorStores.list({ ...params }) -> ListVectorStoresResponse
+- client.vectorStores.list({ ...params }) -> VectorStoresOpenAICursorPage
- client.vectorStores.delete(vectorStoreId) -> VectorStoreDeleteResponse
- client.vectorStores.search(vectorStoreId, { ...params }) -> VectorStoreSearchResponse
@@ -324,7 +324,6 @@ Methods:
Types:
- VectorStoreFile
-- FileListResponse
- FileDeleteResponse
- FileContentResponse
@@ -333,7 +332,7 @@ Methods:
- client.vectorStores.files.create(vectorStoreId, { ...params }) -> VectorStoreFile
- client.vectorStores.files.retrieve(vectorStoreId, fileId) -> VectorStoreFile
- client.vectorStores.files.update(vectorStoreId, fileId, { ...params }) -> VectorStoreFile
-- client.vectorStores.files.list(vectorStoreId, { ...params }) -> FileListResponse
+- client.vectorStores.files.list(vectorStoreId, { ...params }) -> VectorStoreFilesOpenAICursorPage
- client.vectorStores.files.delete(vectorStoreId, fileId) -> FileDeleteResponse
- client.vectorStores.files.content(vectorStoreId, fileId) -> FileContentResponse
@@ -525,6 +524,6 @@ Methods:
- client.files.create({ ...params }) -> File
- client.files.retrieve(fileId) -> File
-- client.files.list({ ...params }) -> ListFilesResponse
+- client.files.list({ ...params }) -> FilesOpenAICursorPage
- client.files.delete(fileId) -> DeleteFileResponse
- client.files.content(fileId) -> unknown
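Usage sketch for the return-type change above: list endpoints now resolve to `*OpenAICursorPage` objects rather than plain `List*Response` wrappers, so call sites look roughly like this (assuming the standard Stainless page interface; the base URL is illustrative):

```ts
import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // illustrative URL

async function listVectorStores() {
  // The page still exposes the raw `data` array plus cursor metadata...
  const page = await client.vectorStores.list({ limit: 20 });
  console.log(page.data.length, page.has_more, page.last_id);

  // ...and can also be iterated item-by-item across pages.
  for await (const vectorStore of client.vectorStores.list()) {
    console.log(vectorStore.id);
  }
}
```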
diff --git a/bin/publish-npm b/bin/publish-npm
index fa2243d..45e8aa8 100644
--- a/bin/publish-npm
+++ b/bin/publish-npm
@@ -58,4 +58,4 @@ else
fi
# Publish with the appropriate tag
-yarn publish --access public --tag "$TAG"
+yarn publish --tag "$TAG"
diff --git a/package.json b/package.json
index 19cb1a3..60f74e7 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "llama-stack-client",
- "version": "0.2.17",
+ "version": "0.3.0",
"description": "The official TypeScript library for the Llama Stack Client API",
"author": "Llama Stack Client ",
"types": "dist/index.d.ts",
@@ -13,6 +13,9 @@
"**/*"
],
"private": false,
+ "publishConfig": {
+ "access": "public"
+ },
"scripts": {
"test": "./scripts/test",
"build": "./scripts/build",
diff --git a/scripts/mock b/scripts/mock
index d2814ae..0b28f6e 100755
--- a/scripts/mock
+++ b/scripts/mock
@@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}"
# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log &
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log &
# Wait for server to come online
echo -n "Waiting for server"
@@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then
echo
else
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL"
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
fi
diff --git a/scripts/test b/scripts/test
index 2049e31..7bce051 100755
--- a/scripts/test
+++ b/scripts/test
@@ -43,7 +43,7 @@ elif ! prism_is_running ; then
echo -e "To run the server, pass in the path or url of your OpenAPI"
echo -e "spec to the prism command:"
echo
- echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
+ echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
echo
exit 1
diff --git a/src/index.ts b/src/index.ts
index 042239d..8241daf 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -5,7 +5,12 @@ import * as qs from './internal/qs';
import * as Core from './core';
import * as Errors from './error';
import * as Pagination from './pagination';
-import { type DatasetsIterrowsParams, DatasetsIterrowsResponse } from './pagination';
+import {
+ type DatasetsIterrowsParams,
+ DatasetsIterrowsResponse,
+ type OpenAICursorPageParams,
+ OpenAICursorPageResponse,
+} from './pagination';
import * as Uploads from './uploads';
import * as API from './resources/index';
import {
@@ -41,6 +46,7 @@ import {
FileCreateParams,
FileListParams,
Files,
+ FilesOpenAICursorPage,
ListFilesResponse,
} from './resources/files';
import {
@@ -177,6 +183,7 @@ import {
ResponseCreateParamsStreaming,
ResponseListParams,
ResponseListResponse,
+ ResponseListResponsesOpenAICursorPage,
ResponseObject,
ResponseObjectStream,
Responses,
@@ -199,6 +206,7 @@ import {
VectorStoreSearchResponse,
VectorStoreUpdateParams,
VectorStores,
+ VectorStoresOpenAICursorPage,
} from './resources/vector-stores/vector-stores';
export interface ClientOptions {
@@ -394,6 +402,7 @@ LlamaStackClient.Toolgroups = Toolgroups;
LlamaStackClient.Tools = Tools;
LlamaStackClient.ToolRuntime = ToolRuntime;
LlamaStackClient.Responses = Responses;
+LlamaStackClient.ResponseListResponsesOpenAICursorPage = ResponseListResponsesOpenAICursorPage;
LlamaStackClient.Agents = Agents;
LlamaStackClient.Datasets = Datasets;
LlamaStackClient.Eval = Eval;
@@ -405,6 +414,7 @@ LlamaStackClient.Completions = Completions;
LlamaStackClient.VectorIo = VectorIo;
LlamaStackClient.VectorDBs = VectorDBs;
LlamaStackClient.VectorStores = VectorStores;
+LlamaStackClient.VectorStoresOpenAICursorPage = VectorStoresOpenAICursorPage;
LlamaStackClient.Models = Models;
LlamaStackClient.PostTraining = PostTraining;
LlamaStackClient.Providers = Providers;
@@ -418,6 +428,7 @@ LlamaStackClient.Scoring = Scoring;
LlamaStackClient.ScoringFunctions = ScoringFunctions;
LlamaStackClient.Benchmarks = Benchmarks;
LlamaStackClient.Files = Files;
+LlamaStackClient.FilesOpenAICursorPage = FilesOpenAICursorPage;
export declare namespace LlamaStackClient {
export type RequestOptions = Core.RequestOptions;
@@ -427,6 +438,12 @@ export declare namespace LlamaStackClient {
type DatasetsIterrowsResponse as DatasetsIterrowsResponse,
};
+ export import OpenAICursorPage = Pagination.OpenAICursorPage;
+ export {
+ type OpenAICursorPageParams as OpenAICursorPageParams,
+ type OpenAICursorPageResponse as OpenAICursorPageResponse,
+ };
+
export {
Toolgroups as Toolgroups,
type ListToolGroupsResponse as ListToolGroupsResponse,
@@ -457,6 +474,7 @@ export declare namespace LlamaStackClient {
type ResponseObject as ResponseObject,
type ResponseObjectStream as ResponseObjectStream,
type ResponseListResponse as ResponseListResponse,
+ ResponseListResponsesOpenAICursorPage as ResponseListResponsesOpenAICursorPage,
type ResponseCreateParams as ResponseCreateParams,
type ResponseCreateParamsNonStreaming as ResponseCreateParamsNonStreaming,
type ResponseCreateParamsStreaming as ResponseCreateParamsStreaming,
@@ -565,6 +583,7 @@ export declare namespace LlamaStackClient {
type VectorStore as VectorStore,
type VectorStoreDeleteResponse as VectorStoreDeleteResponse,
type VectorStoreSearchResponse as VectorStoreSearchResponse,
+ VectorStoresOpenAICursorPage as VectorStoresOpenAICursorPage,
type VectorStoreCreateParams as VectorStoreCreateParams,
type VectorStoreUpdateParams as VectorStoreUpdateParams,
type VectorStoreListParams as VectorStoreListParams,
@@ -675,6 +694,7 @@ export declare namespace LlamaStackClient {
type File as File,
type ListFilesResponse as ListFilesResponse,
type FileContentResponse as FileContentResponse,
+ FilesOpenAICursorPage as FilesOpenAICursorPage,
type FileCreateParams as FileCreateParams,
type FileListParams as FileListParams,
};
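With the re-exports above, the new page classes and shared cursor params are reachable from the package root. A small typing sketch (names taken from this diff; the literal values are illustrative):

```ts
import LlamaStackClient from 'llama-stack-client';

// Page instances can be referenced in type positions via the client namespace.
export function summarizeFilePage(page: LlamaStackClient.FilesOpenAICursorPage): string {
  return `${page.data.length} files, has_more=${page.has_more}`;
}

// List params now extend OpenAICursorPageParams, so `after`/`limit` are shared.
export const exampleParams: LlamaStackClient.FileListParams = { limit: 50, after: 'file-abc123' };
```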
diff --git a/src/pagination.ts b/src/pagination.ts
index 9e0d28e..8eccc4b 100644
--- a/src/pagination.ts
+++ b/src/pagination.ts
@@ -60,3 +60,73 @@ export class DatasetsIterrows<Item> extends AbstractPage<Item> implements Datase
};
}
}
+
+export interface OpenAICursorPageResponse<Item> {
+ data: Array<Item>;
+
+ has_more: boolean;
+
+ last_id: string;
+}
+
+export interface OpenAICursorPageParams {
+ limit?: number;
+
+ after?: string;
+}
+
+export class OpenAICursorPage<Item> extends AbstractPage<Item> implements OpenAICursorPageResponse<Item> {
+ data: Array<Item>;
+
+ has_more: boolean;
+
+ last_id: string;
+
+ constructor(
+ client: APIClient,
+ response: Response,
+ body: OpenAICursorPageResponse<Item>,
+ options: FinalRequestOptions,
+ ) {
+ super(client, response, body, options);
+
+ this.data = body.data || [];
+ this.has_more = body.has_more || false;
+ this.last_id = body.last_id || '';
+ }
+
+ getPaginatedItems(): Item[] {
+ return this.data ?? [];
+ }
+
+ override hasNextPage(): boolean {
+ if (this.has_more === false) {
+ return false;
+ }
+
+ return super.hasNextPage();
+ }
+
+ // @deprecated Please use `nextPageInfo()` instead
+ nextPageParams(): Partial<OpenAICursorPageParams> | null {
+ const info = this.nextPageInfo();
+ if (!info) return null;
+ if ('params' in info) return info.params;
+ const params = Object.fromEntries(info.url.searchParams);
+ if (!Object.keys(params).length) return null;
+ return params;
+ }
+
+ nextPageInfo(): PageInfo | null {
+ const cursor = this.last_id;
+ if (!cursor) {
+ return null;
+ }
+
+ return {
+ params: {
+ after: cursor,
+ },
+ };
+ }
+}
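In the class above, `nextPageInfo()` feeds `last_id` back as the `after` query param, which is what drives both manual paging and auto-pagination. A minimal consumption sketch (assuming the standard Stainless `PagePromise`/`AbstractPage` helpers such as `getNextPage()`; base URL and resource are illustrative):

```ts
import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // illustrative URL

async function walkResponses() {
  // Manual paging: each getNextPage() call re-issues the request with after=<last_id>.
  let page = await client.responses.list({ limit: 25 });
  while (true) {
    for (const response of page.data) {
      console.log(response.id, response.status);
    }
    if (!page.hasNextPage()) break;
    page = await page.getNextPage();
  }
}
```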
diff --git a/src/resources/chat/chat.ts b/src/resources/chat/chat.ts
index a38445d..b43e6d3 100644
--- a/src/resources/chat/chat.ts
+++ b/src/resources/chat/chat.ts
@@ -9,6 +9,7 @@ import {
CompletionCreateResponse,
CompletionListParams,
CompletionListResponse,
+ CompletionListResponsesOpenAICursorPage,
CompletionRetrieveResponse,
Completions,
} from './completions';
@@ -219,6 +220,7 @@ export namespace ChatCompletionChunk {
}
Chat.Completions = Completions;
+Chat.CompletionListResponsesOpenAICursorPage = CompletionListResponsesOpenAICursorPage;
export declare namespace Chat {
export { type ChatCompletionChunk as ChatCompletionChunk };
@@ -228,6 +230,7 @@ export declare namespace Chat {
type CompletionCreateResponse as CompletionCreateResponse,
type CompletionRetrieveResponse as CompletionRetrieveResponse,
type CompletionListResponse as CompletionListResponse,
+ CompletionListResponsesOpenAICursorPage as CompletionListResponsesOpenAICursorPage,
type CompletionCreateParams as CompletionCreateParams,
type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming,
type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming,
diff --git a/src/resources/chat/completions.ts b/src/resources/chat/completions.ts
index 6331b0a..c7ed5e8 100644
--- a/src/resources/chat/completions.ts
+++ b/src/resources/chat/completions.ts
@@ -6,6 +6,7 @@ import { APIPromise } from '../../core';
import * as Core from '../../core';
import * as CompletionsAPI from './completions';
import * as ChatAPI from './chat';
+import { OpenAICursorPage, type OpenAICursorPageParams } from '../../pagination';
import { Stream } from '../../streaming';
export class Completions extends APIResource {
@@ -46,19 +47,30 @@ export class Completions extends APIResource {
/**
* List all chat completions.
*/
- list(query?: CompletionListParams, options?: Core.RequestOptions): Core.APIPromise<CompletionListResponse>;
- list(options?: Core.RequestOptions): Core.APIPromise<CompletionListResponse>;
+ list(
+ query?: CompletionListParams,
+ options?: Core.RequestOptions,
+ ): Core.PagePromise<CompletionListResponsesOpenAICursorPage, CompletionListResponse>;
+ list(
+ options?: Core.RequestOptions,
+ ): Core.PagePromise<CompletionListResponsesOpenAICursorPage, CompletionListResponse>;
list(
query: CompletionListParams | Core.RequestOptions = {},
options?: Core.RequestOptions,
- ): Core.APIPromise<CompletionListResponse> {
+ ): Core.PagePromise<CompletionListResponsesOpenAICursorPage, CompletionListResponse> {
if (isRequestOptions(query)) {
return this.list({}, query);
}
- return this._client.get('/v1/openai/v1/chat/completions', { query, ...options });
+ return this._client.getAPIList(
+ '/v1/openai/v1/chat/completions',
+ CompletionListResponsesOpenAICursorPage,
+ { query, ...options },
+ );
}
}
+export class CompletionListResponsesOpenAICursorPage extends OpenAICursorPage<CompletionListResponse> {}
+
/**
* Response from an OpenAI-compatible chat completion request.
*/
@@ -1217,756 +1229,724 @@ export namespace CompletionRetrieveResponse {
}
}
-/**
- * Response from listing OpenAI-compatible chat completions.
- */
export interface CompletionListResponse {
/**
- * List of chat completion objects with their input messages
+ * The ID of the chat completion
*/
- data: Array<CompletionListResponse.Data>;
+ id: string;
/**
- * ID of the first completion in this list
+ * List of choices
*/
- first_id: string;
+ choices: Array<CompletionListResponse.Choice>;
/**
- * Whether there are more completions available beyond this list
+ * The Unix timestamp in seconds when the chat completion was created
*/
- has_more: boolean;
+ created: number;
+
+ input_messages: Array<
+ | CompletionListResponse.OpenAIUserMessageParam
+ | CompletionListResponse.OpenAISystemMessageParam
+ | CompletionListResponse.OpenAIAssistantMessageParam
+ | CompletionListResponse.OpenAIToolMessageParam
+ | CompletionListResponse.OpenAIDeveloperMessageParam
+ >;
/**
- * ID of the last completion in this list
+ * The model that was used to generate the chat completion
*/
- last_id: string;
+ model: string;
/**
- * Must be "list" to identify this as a list response
+ * The object type, which will be "chat.completion"
*/
- object: 'list';
+ object: 'chat.completion';
}
export namespace CompletionListResponse {
- export interface Data {
- /**
- * The ID of the chat completion
- */
- id: string;
-
+ /**
+ * A choice from an OpenAI-compatible chat completion response.
+ */
+ export interface Choice {
/**
- * List of choices
+ * The reason the model stopped generating
*/
- choices: Array<Data.Choice>;
+ finish_reason: string;
/**
- * The Unix timestamp in seconds when the chat completion was created
+ * The index of the choice
*/
- created: number;
-
- input_messages: Array<
- | Data.OpenAIUserMessageParam
- | Data.OpenAISystemMessageParam
- | Data.OpenAIAssistantMessageParam
- | Data.OpenAIToolMessageParam
- | Data.OpenAIDeveloperMessageParam
- >;
+ index: number;
/**
- * The model that was used to generate the chat completion
+ * The message from the model
*/
- model: string;
+ message:
+ | Choice.OpenAIUserMessageParam
+ | Choice.OpenAISystemMessageParam
+ | Choice.OpenAIAssistantMessageParam
+ | Choice.OpenAIToolMessageParam
+ | Choice.OpenAIDeveloperMessageParam;
/**
- * The object type, which will be "chat.completion"
+ * (Optional) The log probabilities for the tokens in the message
*/
- object: 'chat.completion';
+ logprobs?: Choice.Logprobs;
}
- export namespace Data {
+ export namespace Choice {
/**
- * A choice from an OpenAI-compatible chat completion response.
+ * A message from the user in an OpenAI-compatible chat completion request.
*/
- export interface Choice {
- /**
- * The reason the model stopped generating
- */
- finish_reason: string;
-
+ export interface OpenAIUserMessageParam {
/**
- * The index of the choice
+ * The content of the message, which can include text and other media
*/
- index: number;
+ content:
+ | string
+ | Array<
+ | OpenAIUserMessageParam.OpenAIChatCompletionContentPartTextParam
+ | OpenAIUserMessageParam.OpenAIChatCompletionContentPartImageParam
+ | OpenAIUserMessageParam.OpenAIFile
+ >;
/**
- * The message from the model
+ * Must be "user" to identify this as a user message
*/
- message:
- | Choice.OpenAIUserMessageParam
- | Choice.OpenAISystemMessageParam
- | Choice.OpenAIAssistantMessageParam
- | Choice.OpenAIToolMessageParam
- | Choice.OpenAIDeveloperMessageParam;
+ role: 'user';
/**
- * (Optional) The log probabilities for the tokens in the message
+ * (Optional) The name of the user message participant.
*/
- logprobs?: Choice.Logprobs;
+ name?: string;
}
- export namespace Choice {
+ export namespace OpenAIUserMessageParam {
/**
- * A message from the user in an OpenAI-compatible chat completion request.
+ * Text content part for OpenAI-compatible chat completion messages.
*/
- export interface OpenAIUserMessageParam {
+ export interface OpenAIChatCompletionContentPartTextParam {
/**
- * The content of the message, which can include text and other media
+ * The text content of the message
*/
- content:
- | string
- | Array<
- | OpenAIUserMessageParam.OpenAIChatCompletionContentPartTextParam
- | OpenAIUserMessageParam.OpenAIChatCompletionContentPartImageParam
- | OpenAIUserMessageParam.OpenAIFile
- >;
+ text: string;
/**
- * Must be "user" to identify this as a user message
+ * Must be "text" to identify this as text content
*/
- role: 'user';
+ type: 'text';
+ }
+ /**
+ * Image content part for OpenAI-compatible chat completion messages.
+ */
+ export interface OpenAIChatCompletionContentPartImageParam {
/**
- * (Optional) The name of the user message participant.
+ * Image URL specification and processing details
*/
- name?: string;
- }
+ image_url: OpenAIChatCompletionContentPartImageParam.ImageURL;
- export namespace OpenAIUserMessageParam {
/**
- * Text content part for OpenAI-compatible chat completion messages.
+ * Must be "image_url" to identify this as image content
*/
- export interface OpenAIChatCompletionContentPartTextParam {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
+ type: 'image_url';
+ }
+ export namespace OpenAIChatCompletionContentPartImageParam {
/**
- * Image content part for OpenAI-compatible chat completion messages.
+ * Image URL specification and processing details
*/
- export interface OpenAIChatCompletionContentPartImageParam {
- /**
- * Image URL specification and processing details
- */
- image_url: OpenAIChatCompletionContentPartImageParam.ImageURL;
-
+ export interface ImageURL {
/**
- * Must be "image_url" to identify this as image content
+ * URL of the image to include in the message
*/
- type: 'image_url';
- }
+ url: string;
- export namespace OpenAIChatCompletionContentPartImageParam {
/**
- * Image URL specification and processing details
+ * (Optional) Level of detail for image processing. Can be "low", "high", or "auto"
*/
- export interface ImageURL {
- /**
- * URL of the image to include in the message
- */
- url: string;
-
- /**
- * (Optional) Level of detail for image processing. Can be "low", "high", or "auto"
- */
- detail?: string;
- }
+ detail?: string;
}
+ }
- export interface OpenAIFile {
- file: OpenAIFile.File;
+ export interface OpenAIFile {
+ file: OpenAIFile.File;
- type: 'file';
- }
+ type: 'file';
+ }
- export namespace OpenAIFile {
- export interface File {
- file_data?: string;
+ export namespace OpenAIFile {
+ export interface File {
+ file_data?: string;
- file_id?: string;
+ file_id?: string;
- filename?: string;
- }
+ filename?: string;
}
}
+ }
+ /**
+ * A system message providing instructions or context to the model.
+ */
+ export interface OpenAISystemMessageParam {
/**
- * A system message providing instructions or context to the model.
+ * The content of the "system prompt". If multiple system messages are provided,
+ * they are concatenated. The underlying Llama Stack code may also add other system
+ * messages (for example, for formatting tool definitions).
*/
- export interface OpenAISystemMessageParam {
- /**
- * The content of the "system prompt". If multiple system messages are provided,
- * they are concatenated. The underlying Llama Stack code may also add other system
- * messages (for example, for formatting tool definitions).
- */
- content: string | Array;
-
- /**
- * Must be "system" to identify this as a system message
- */
- role: 'system';
-
- /**
- * (Optional) The name of the system message participant.
- */
- name?: string;
- }
-
- export namespace OpenAISystemMessageParam {
- /**
- * Text content part for OpenAI-compatible chat completion messages.
- */
- export interface UnionMember1 {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
- }
+ content: string | Array<OpenAISystemMessageParam.UnionMember1>;
/**
- * A message containing the model's (assistant) response in an OpenAI-compatible
- * chat completion request.
+ * Must be "system" to identify this as a system message
*/
- export interface OpenAIAssistantMessageParam {
- /**
- * Must be "assistant" to identify this as the model's response
- */
- role: 'assistant';
+ role: 'system';
- /**
- * The content of the model's response
- */
- content?: string | Array;
+ /**
+ * (Optional) The name of the system message participant.
+ */
+ name?: string;
+ }
+ export namespace OpenAISystemMessageParam {
+ /**
+ * Text content part for OpenAI-compatible chat completion messages.
+ */
+ export interface UnionMember1 {
/**
- * (Optional) The name of the assistant message participant.
+ * The text content of the message
*/
- name?: string;
+ text: string;
/**
- * List of tool calls. Each tool call is an OpenAIChatCompletionToolCall object.
+ * Must be "text" to identify this as text content
*/
- tool_calls?: Array;
+ type: 'text';
}
+ }
- export namespace OpenAIAssistantMessageParam {
- /**
- * Text content part for OpenAI-compatible chat completion messages.
- */
- export interface UnionMember1 {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
-
- /**
- * Tool call specification for OpenAI-compatible chat completion responses.
- */
- export interface ToolCall {
- /**
- * Must be "function" to identify this as a function call
- */
- type: 'function';
+ /**
+ * A message containing the model's (assistant) response in an OpenAI-compatible
+ * chat completion request.
+ */
+ export interface OpenAIAssistantMessageParam {
+ /**
+ * Must be "assistant" to identify this as the model's response
+ */
+ role: 'assistant';
- /**
- * (Optional) Unique identifier for the tool call
- */
- id?: string;
+ /**
+ * The content of the model's response
+ */
+ content?: string | Array<OpenAIAssistantMessageParam.UnionMember1>;
- /**
- * (Optional) Function call details
- */
- function?: ToolCall.Function;
+ /**
+ * (Optional) The name of the assistant message participant.
+ */
+ name?: string;
- /**
- * (Optional) Index of the tool call in the list
- */
- index?: number;
- }
+ /**
+ * List of tool calls. Each tool call is an OpenAIChatCompletionToolCall object.
+ */
+ tool_calls?: Array<OpenAIAssistantMessageParam.ToolCall>;
+ }
- export namespace ToolCall {
- /**
- * (Optional) Function call details
- */
- export interface Function {
- /**
- * (Optional) Arguments to pass to the function as a JSON string
- */
- arguments?: string;
+ export namespace OpenAIAssistantMessageParam {
+ /**
+ * Text content part for OpenAI-compatible chat completion messages.
+ */
+ export interface UnionMember1 {
+ /**
+ * The text content of the message
+ */
+ text: string;
- /**
- * (Optional) Name of the function to call
- */
- name?: string;
- }
- }
+ /**
+ * Must be "text" to identify this as text content
+ */
+ type: 'text';
}
/**
- * A message representing the result of a tool invocation in an OpenAI-compatible
- * chat completion request.
+ * Tool call specification for OpenAI-compatible chat completion responses.
*/
- export interface OpenAIToolMessageParam {
+ export interface ToolCall {
/**
- * The response content from the tool
+ * Must be "function" to identify this as a function call
*/
- content: string | Array;
+ type: 'function';
/**
- * Must be "tool" to identify this as a tool response
+ * (Optional) Unique identifier for the tool call
*/
- role: 'tool';
+ id?: string;
/**
- * Unique identifier for the tool call this response is for
+ * (Optional) Function call details
*/
- tool_call_id: string;
+ function?: ToolCall.Function;
+
+ /**
+ * (Optional) Index of the tool call in the list
+ */
+ index?: number;
}
- export namespace OpenAIToolMessageParam {
+ export namespace ToolCall {
/**
- * Text content part for OpenAI-compatible chat completion messages.
+ * (Optional) Function call details
*/
- export interface UnionMember1 {
+ export interface Function {
/**
- * The text content of the message
+ * (Optional) Arguments to pass to the function as a JSON string
*/
- text: string;
+ arguments?: string;
/**
- * Must be "text" to identify this as text content
+ * (Optional) Name of the function to call
*/
- type: 'text';
+ name?: string;
}
}
+ }
+ /**
+ * A message representing the result of a tool invocation in an OpenAI-compatible
+ * chat completion request.
+ */
+ export interface OpenAIToolMessageParam {
/**
- * A message from the developer in an OpenAI-compatible chat completion request.
+ * The response content from the tool
*/
- export interface OpenAIDeveloperMessageParam {
- /**
- * The content of the developer message
- */
- content: string | Array;
+ content: string | Array<OpenAIToolMessageParam.UnionMember1>;
+
+ /**
+ * Must be "tool" to identify this as a tool response
+ */
+ role: 'tool';
+
+ /**
+ * Unique identifier for the tool call this response is for
+ */
+ tool_call_id: string;
+ }
+ export namespace OpenAIToolMessageParam {
+ /**
+ * Text content part for OpenAI-compatible chat completion messages.
+ */
+ export interface UnionMember1 {
/**
- * Must be "developer" to identify this as a developer message
+ * The text content of the message
*/
- role: 'developer';
+ text: string;
/**
- * (Optional) The name of the developer message participant.
+ * Must be "text" to identify this as text content
*/
- name?: string;
+ type: 'text';
}
+ }
- export namespace OpenAIDeveloperMessageParam {
- /**
- * Text content part for OpenAI-compatible chat completion messages.
- */
- export interface UnionMember1 {
- /**
- * The text content of the message
- */
- text: string;
+ /**
+ * A message from the developer in an OpenAI-compatible chat completion request.
+ */
+ export interface OpenAIDeveloperMessageParam {
+ /**
+ * The content of the developer message
+ */
+ content: string | Array<OpenAIDeveloperMessageParam.UnionMember1>;
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
- }
+ /**
+ * Must be "developer" to identify this as a developer message
+ */
+ role: 'developer';
/**
- * (Optional) The log probabilities for the tokens in the message
+ * (Optional) The name of the developer message participant.
*/
- export interface Logprobs {
+ name?: string;
+ }
+
+ export namespace OpenAIDeveloperMessageParam {
+ /**
+ * Text content part for OpenAI-compatible chat completion messages.
+ */
+ export interface UnionMember1 {
/**
- * (Optional) The log probabilities for the tokens in the message
+ * The text content of the message
*/
- content?: Array;
+ text: string;
/**
- * (Optional) The log probabilities for the tokens in the message
+ * Must be "text" to identify this as text content
*/
- refusal?: Array;
+ type: 'text';
}
+ }
- export namespace Logprobs {
+ /**
+ * (Optional) The log probabilities for the tokens in the message
+ */
+ export interface Logprobs {
+ /**
+ * (Optional) The log probabilities for the tokens in the message
+ */
+ content?: Array<Logprobs.Content>;
+
+ /**
+ * (Optional) The log probabilities for the tokens in the message
+ */
+ refusal?: Array<Logprobs.Refusal>;
+ }
+
+ export namespace Logprobs {
+ /**
+ * The log probability for a token from an OpenAI-compatible chat completion
+ * response.
+ */
+ export interface Content {
+ token: string;
+
+ logprob: number;
+
+ top_logprobs: Array<Content.TopLogprob>;
+
+ bytes?: Array<number>;
+ }
+
+ export namespace Content {
/**
- * The log probability for a token from an OpenAI-compatible chat completion
+ * The top log probability for a token from an OpenAI-compatible chat completion
* response.
*/
- export interface Content {
+ export interface TopLogprob {
token: string;
logprob: number;
- top_logprobs: Array;
-
bytes?: Array;
}
+ }
- export namespace Content {
- /**
- * The top log probability for a token from an OpenAI-compatible chat completion
- * response.
- */
- export interface TopLogprob {
- token: string;
+ /**
+ * The log probability for a token from an OpenAI-compatible chat completion
+ * response.
+ */
+ export interface Refusal {
+ token: string;
- logprob: number;
+ logprob: number;
- bytes?: Array;
- }
- }
+ top_logprobs: Array<Refusal.TopLogprob>;
+
+ bytes?: Array<number>;
+ }
+ export namespace Refusal {
/**
- * The log probability for a token from an OpenAI-compatible chat completion
+ * The top log probability for a token from an OpenAI-compatible chat completion
* response.
*/
- export interface Refusal {
+ export interface TopLogprob {
token: string;
logprob: number;
- top_logprobs: Array;
-
bytes?: Array;
}
+ }
+ }
+ }
- export namespace Refusal {
- /**
- * The top log probability for a token from an OpenAI-compatible chat completion
- * response.
- */
- export interface TopLogprob {
- token: string;
+ /**
+ * A message from the user in an OpenAI-compatible chat completion request.
+ */
+ export interface OpenAIUserMessageParam {
+ /**
+ * The content of the message, which can include text and other media
+ */
+ content:
+ | string
+ | Array<
+ | OpenAIUserMessageParam.OpenAIChatCompletionContentPartTextParam
+ | OpenAIUserMessageParam.OpenAIChatCompletionContentPartImageParam
+ | OpenAIUserMessageParam.OpenAIFile
+ >;
- logprob: number;
+ /**
+ * Must be "user" to identify this as a user message
+ */
+ role: 'user';
- bytes?: Array;
- }
- }
- }
- }
+ /**
+ * (Optional) The name of the user message participant.
+ */
+ name?: string;
+ }
+ export namespace OpenAIUserMessageParam {
/**
- * A message from the user in an OpenAI-compatible chat completion request.
+ * Text content part for OpenAI-compatible chat completion messages.
*/
- export interface OpenAIUserMessageParam {
+ export interface OpenAIChatCompletionContentPartTextParam {
/**
- * The content of the message, which can include text and other media
+ * The text content of the message
*/
- content:
- | string
- | Array<
- | OpenAIUserMessageParam.OpenAIChatCompletionContentPartTextParam
- | OpenAIUserMessageParam.OpenAIChatCompletionContentPartImageParam
- | OpenAIUserMessageParam.OpenAIFile
- >;
+ text: string;
/**
- * Must be "user" to identify this as a user message
+ * Must be "text" to identify this as text content
*/
- role: 'user';
+ type: 'text';
+ }
+ /**
+ * Image content part for OpenAI-compatible chat completion messages.
+ */
+ export interface OpenAIChatCompletionContentPartImageParam {
/**
- * (Optional) The name of the user message participant.
+ * Image URL specification and processing details
*/
- name?: string;
- }
+ image_url: OpenAIChatCompletionContentPartImageParam.ImageURL;
- export namespace OpenAIUserMessageParam {
/**
- * Text content part for OpenAI-compatible chat completion messages.
+ * Must be "image_url" to identify this as image content
*/
- export interface OpenAIChatCompletionContentPartTextParam {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
+ type: 'image_url';
+ }
+ export namespace OpenAIChatCompletionContentPartImageParam {
/**
- * Image content part for OpenAI-compatible chat completion messages.
+ * Image URL specification and processing details
*/
- export interface OpenAIChatCompletionContentPartImageParam {
+ export interface ImageURL {
/**
- * Image URL specification and processing details
+ * URL of the image to include in the message
*/
- image_url: OpenAIChatCompletionContentPartImageParam.ImageURL;
+ url: string;
/**
- * Must be "image_url" to identify this as image content
+ * (Optional) Level of detail for image processing. Can be "low", "high", or "auto"
*/
- type: 'image_url';
+ detail?: string;
}
+ }
- export namespace OpenAIChatCompletionContentPartImageParam {
- /**
- * Image URL specification and processing details
- */
- export interface ImageURL {
- /**
- * URL of the image to include in the message
- */
- url: string;
+ export interface OpenAIFile {
+ file: OpenAIFile.File;
- /**
- * (Optional) Level of detail for image processing. Can be "low", "high", or "auto"
- */
- detail?: string;
- }
- }
+ type: 'file';
+ }
- export interface OpenAIFile {
- file: OpenAIFile.File;
+ export namespace OpenAIFile {
+ export interface File {
+ file_data?: string;
- type: 'file';
+ file_id?: string;
+
+ filename?: string;
}
+ }
+ }
- export namespace OpenAIFile {
- export interface File {
- file_data?: string;
+ /**
+ * A system message providing instructions or context to the model.
+ */
+ export interface OpenAISystemMessageParam {
+ /**
+ * The content of the "system prompt". If multiple system messages are provided,
+ * they are concatenated. The underlying Llama Stack code may also add other system
+ * messages (for example, for formatting tool definitions).
+ */
+ content: string | Array<OpenAISystemMessageParam.UnionMember1>;
- file_id?: string;
+ /**
+ * Must be "system" to identify this as a system message
+ */
+ role: 'system';
- filename?: string;
- }
- }
- }
+ /**
+ * (Optional) The name of the system message participant.
+ */
+ name?: string;
+ }
+ export namespace OpenAISystemMessageParam {
/**
- * A system message providing instructions or context to the model.
+ * Text content part for OpenAI-compatible chat completion messages.
*/
- export interface OpenAISystemMessageParam {
+ export interface UnionMember1 {
/**
- * The content of the "system prompt". If multiple system messages are provided,
- * they are concatenated. The underlying Llama Stack code may also add other system
- * messages (for example, for formatting tool definitions).
+ * The text content of the message
*/
- content: string | Array;
+ text: string;
/**
- * Must be "system" to identify this as a system message
+ * Must be "text" to identify this as text content
*/
- role: 'system';
+ type: 'text';
+ }
+ }
+
+ /**
+ * A message containing the model's (assistant) response in an OpenAI-compatible
+ * chat completion request.
+ */
+ export interface OpenAIAssistantMessageParam {
+ /**
+ * Must be "assistant" to identify this as the model's response
+ */
+ role: 'assistant';
+
+ /**
+ * The content of the model's response
+ */
+ content?: string | Array<OpenAIAssistantMessageParam.UnionMember1>;
+ /**
+ * (Optional) The name of the assistant message participant.
+ */
+ name?: string;
+
+ /**
+ * List of tool calls. Each tool call is an OpenAIChatCompletionToolCall object.
+ */
+ tool_calls?: Array<OpenAIAssistantMessageParam.ToolCall>;
+ }
+
+ export namespace OpenAIAssistantMessageParam {
+ /**
+ * Text content part for OpenAI-compatible chat completion messages.
+ */
+ export interface UnionMember1 {
/**
- * (Optional) The name of the system message participant.
+ * The text content of the message
*/
- name?: string;
- }
+ text: string;
- export namespace OpenAISystemMessageParam {
/**
- * Text content part for OpenAI-compatible chat completion messages.
+ * Must be "text" to identify this as text content
*/
- export interface UnionMember1 {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
+ type: 'text';
}
/**
- * A message containing the model's (assistant) response in an OpenAI-compatible
- * chat completion request.
+ * Tool call specification for OpenAI-compatible chat completion responses.
*/
- export interface OpenAIAssistantMessageParam {
+ export interface ToolCall {
/**
- * Must be "assistant" to identify this as the model's response
+ * Must be "function" to identify this as a function call
*/
- role: 'assistant';
+ type: 'function';
/**
- * The content of the model's response
+ * (Optional) Unique identifier for the tool call
*/
- content?: string | Array;
+ id?: string;
/**
- * (Optional) The name of the assistant message participant.
+ * (Optional) Function call details
*/
- name?: string;
+ function?: ToolCall.Function;
/**
- * List of tool calls. Each tool call is an OpenAIChatCompletionToolCall object.
+ * (Optional) Index of the tool call in the list
*/
- tool_calls?: Array;
+ index?: number;
}
- export namespace OpenAIAssistantMessageParam {
- /**
- * Text content part for OpenAI-compatible chat completion messages.
- */
- export interface UnionMember1 {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
-
+ export namespace ToolCall {
/**
- * Tool call specification for OpenAI-compatible chat completion responses.
+ * (Optional) Function call details
*/
- export interface ToolCall {
- /**
- * Must be "function" to identify this as a function call
- */
- type: 'function';
-
- /**
- * (Optional) Unique identifier for the tool call
- */
- id?: string;
-
+ export interface Function {
/**
- * (Optional) Function call details
+ * (Optional) Arguments to pass to the function as a JSON string
*/
- function?: ToolCall.Function;
+ arguments?: string;
/**
- * (Optional) Index of the tool call in the list
+ * (Optional) Name of the function to call
*/
- index?: number;
+ name?: string;
}
+ }
+ }
- export namespace ToolCall {
- /**
- * (Optional) Function call details
- */
- export interface Function {
- /**
- * (Optional) Arguments to pass to the function as a JSON string
- */
- arguments?: string;
+ /**
+ * A message representing the result of a tool invocation in an OpenAI-compatible
+ * chat completion request.
+ */
+ export interface OpenAIToolMessageParam {
+ /**
+ * The response content from the tool
+ */
+ content: string | Array<OpenAIToolMessageParam.UnionMember1>;
- /**
- * (Optional) Name of the function to call
- */
- name?: string;
- }
- }
- }
+ /**
+ * Must be "tool" to identify this as a tool response
+ */
+ role: 'tool';
/**
- * A message representing the result of a tool invocation in an OpenAI-compatible
- * chat completion request.
+ * Unique identifier for the tool call this response is for
*/
- export interface OpenAIToolMessageParam {
- /**
- * The response content from the tool
- */
- content: string | Array;
+ tool_call_id: string;
+ }
+ export namespace OpenAIToolMessageParam {
+ /**
+ * Text content part for OpenAI-compatible chat completion messages.
+ */
+ export interface UnionMember1 {
/**
- * Must be "tool" to identify this as a tool response
+ * The text content of the message
*/
- role: 'tool';
+ text: string;
/**
- * Unique identifier for the tool call this response is for
+ * Must be "text" to identify this as text content
*/
- tool_call_id: string;
+ type: 'text';
}
+ }
- export namespace OpenAIToolMessageParam {
- /**
- * Text content part for OpenAI-compatible chat completion messages.
- */
- export interface UnionMember1 {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
- }
+ /**
+ * A message from the developer in an OpenAI-compatible chat completion request.
+ */
+ export interface OpenAIDeveloperMessageParam {
+ /**
+ * The content of the developer message
+ */
+ content: string | Array<OpenAIDeveloperMessageParam.UnionMember1>;
/**
- * A message from the developer in an OpenAI-compatible chat completion request.
+ * Must be "developer" to identify this as a developer message
*/
- export interface OpenAIDeveloperMessageParam {
- /**
- * The content of the developer message
- */
- content: string | Array;
+ role: 'developer';
- /**
- * Must be "developer" to identify this as a developer message
- */
- role: 'developer';
+ /**
+ * (Optional) The name of the developer message participant.
+ */
+ name?: string;
+ }
+ export namespace OpenAIDeveloperMessageParam {
+ /**
+ * Text content part for OpenAI-compatible chat completion messages.
+ */
+ export interface UnionMember1 {
/**
- * (Optional) The name of the developer message participant.
+ * The text content of the message
*/
- name?: string;
- }
+ text: string;
- export namespace OpenAIDeveloperMessageParam {
/**
- * Text content part for OpenAI-compatible chat completion messages.
+ * Must be "text" to identify this as text content
*/
- export interface UnionMember1 {
- /**
- * The text content of the message
- */
- text: string;
-
- /**
- * Must be "text" to identify this as text content
- */
- type: 'text';
- }
+ type: 'text';
}
}
}
@@ -2471,17 +2451,7 @@ export interface CompletionCreateParamsStreaming extends CompletionCreateParamsB
stream: true;
}
-export interface CompletionListParams {
- /**
- * The ID of the last chat completion to return.
- */
- after?: string;
-
- /**
- * The maximum number of chat completions to return.
- */
- limit?: number;
-
+export interface CompletionListParams extends OpenAICursorPageParams {
/**
* The model to filter by.
*/
@@ -2493,11 +2463,14 @@ export interface CompletionListParams {
order?: 'asc' | 'desc';
}
+Completions.CompletionListResponsesOpenAICursorPage = CompletionListResponsesOpenAICursorPage;
+
export declare namespace Completions {
export {
type CompletionCreateResponse as CompletionCreateResponse,
type CompletionRetrieveResponse as CompletionRetrieveResponse,
type CompletionListResponse as CompletionListResponse,
+ CompletionListResponsesOpenAICursorPage as CompletionListResponsesOpenAICursorPage,
type CompletionCreateParams as CompletionCreateParams,
type CompletionCreateParamsNonStreaming as CompletionCreateParamsNonStreaming,
type CompletionCreateParamsStreaming as CompletionCreateParamsStreaming,
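A short sketch of the paginated `chat.completions.list()` introduced above (the model name is illustrative; the `model`/`order` filters come from `CompletionListParams`):

```ts
import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // illustrative URL

async function recentCompletions() {
  // Auto-pagination over stored chat completions, newest first, filtered by model.
  const pages = client.chat.completions.list({ model: 'meta-llama/Llama-3.3-70B-Instruct', order: 'desc' });
  for await (const completion of pages) {
    console.log(completion.id, new Date(completion.created * 1000).toISOString());
  }
}
```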
diff --git a/src/resources/chat/index.ts b/src/resources/chat/index.ts
index 31c9aba..2157334 100644
--- a/src/resources/chat/index.ts
+++ b/src/resources/chat/index.ts
@@ -2,6 +2,7 @@
export { Chat, type ChatCompletionChunk } from './chat';
export {
+ CompletionListResponsesOpenAICursorPage,
Completions,
type CompletionCreateResponse,
type CompletionRetrieveResponse,
diff --git a/src/resources/files.ts b/src/resources/files.ts
index 7671fc4..fe9b226 100644
--- a/src/resources/files.ts
+++ b/src/resources/files.ts
@@ -3,6 +3,7 @@
import { APIResource } from '../resource';
import { isRequestOptions } from '../core';
import * as Core from '../core';
+import { OpenAICursorPage, type OpenAICursorPageParams } from '../pagination';
export class Files extends APIResource {
/**
@@ -26,16 +27,16 @@ export class Files extends APIResource {
/**
* Returns a list of files that belong to the user's organization.
*/
- list(query?: FileListParams, options?: Core.RequestOptions): Core.APIPromise<ListFilesResponse>;
- list(options?: Core.RequestOptions): Core.APIPromise<ListFilesResponse>;
+ list(query?: FileListParams, options?: Core.RequestOptions): Core.PagePromise<FilesOpenAICursorPage, File>;
+ list(options?: Core.RequestOptions): Core.PagePromise<FilesOpenAICursorPage, File>;
list(
query: FileListParams | Core.RequestOptions = {},
options?: Core.RequestOptions,
- ): Core.APIPromise<ListFilesResponse> {
+ ): Core.PagePromise<FilesOpenAICursorPage, File> {
if (isRequestOptions(query)) {
return this.list({}, query);
}
- return this._client.get('/v1/openai/v1/files', { query, ...options });
+ return this._client.getAPIList('/v1/openai/v1/files', FilesOpenAICursorPage, { query, ...options });
}
/**
@@ -53,6 +54,8 @@ export class Files extends APIResource {
}
}
+export class FilesOpenAICursorPage extends OpenAICursorPage<File> {}
+
/**
* Response for deleting a file in OpenAI Files API.
*/
@@ -154,21 +157,7 @@ export interface FileCreateParams {
purpose: 'assistants';
}
-export interface FileListParams {
- /**
- * A cursor for use in pagination. `after` is an object ID that defines your place
- * in the list. For instance, if you make a list request and receive 100 objects,
- * ending with obj_foo, your subsequent call can include after=obj_foo in order to
- * fetch the next page of the list.
- */
- after?: string;
-
- /**
- * A limit on the number of objects to be returned. Limit can range between 1 and
- * 10,000, and the default is 10,000.
- */
- limit?: number;
-
+export interface FileListParams extends OpenAICursorPageParams {
/**
* Sort order by the `created_at` timestamp of the objects. `asc` for ascending
* order and `desc` for descending order.
@@ -181,12 +170,15 @@ export interface FileListParams {
purpose?: 'assistants';
}
+Files.FilesOpenAICursorPage = FilesOpenAICursorPage;
+
export declare namespace Files {
export {
type DeleteFileResponse as DeleteFileResponse,
type File as File,
type ListFilesResponse as ListFilesResponse,
type FileContentResponse as FileContentResponse,
+ FilesOpenAICursorPage as FilesOpenAICursorPage,
type FileCreateParams as FileCreateParams,
type FileListParams as FileListParams,
};
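`FileListParams` now inherits `after`/`limit` from `OpenAICursorPageParams`, so a first/next-page request might look like this sketch (assuming the standard `getNextPage()` helper; values are illustrative):

```ts
import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // illustrative URL

async function firstTwoFilePages() {
  const first = await client.files.list({ limit: 100, purpose: 'assistants', order: 'desc' });
  console.log(first.data.map((f) => f.filename));

  if (first.hasNextPage()) {
    const second = await first.getNextPage(); // re-requests with after=<last_id>
    console.log(second.data.length);
  }
}
```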
diff --git a/src/resources/index.ts b/src/resources/index.ts
index ceba761..36c158a 100644
--- a/src/resources/index.ts
+++ b/src/resources/index.ts
@@ -53,6 +53,7 @@ export {
type EvalRunEvalAlphaParams,
} from './eval/eval';
export {
+ FilesOpenAICursorPage,
Files,
type DeleteFileResponse,
type File,
@@ -97,6 +98,7 @@ export {
} from './post-training/post-training';
export { Providers, type ListProvidersResponse, type ProviderListResponse } from './providers';
export {
+ ResponseListResponsesOpenAICursorPage,
Responses,
type ResponseObject,
type ResponseObjectStream,
@@ -189,6 +191,7 @@ export {
type VectorIoQueryParams,
} from './vector-io';
export {
+ VectorStoresOpenAICursorPage,
VectorStores,
type ListVectorStoresResponse,
type VectorStore,
diff --git a/src/resources/moderations.ts b/src/resources/moderations.ts
index aee9b57..a945ab3 100644
--- a/src/resources/moderations.ts
+++ b/src/resources/moderations.ts
@@ -55,11 +55,7 @@ export namespace CreateResponse {
category_applied_input_types?: { [key: string]: Array };
/**
- * A list of the categories along with their scores as predicted by model. Required
- * set of categories that need to be in response - violence - violence/graphic -
- * harassment - harassment/threatening - hate - hate/threatening - illicit -
- * illicit/violent - sexual - sexual/minors - self-harm - self-harm/intent -
- * self-harm/instructions
+ * A list of the categories along with their scores as predicted by model.
*/
category_scores?: { [key: string]: number };
diff --git a/src/resources/responses/index.ts b/src/resources/responses/index.ts
index 6a19891..c4ea6a8 100644
--- a/src/resources/responses/index.ts
+++ b/src/resources/responses/index.ts
@@ -2,6 +2,7 @@
export { InputItems, type InputItemListResponse, type InputItemListParams } from './input-items';
export {
+ ResponseListResponsesOpenAICursorPage,
Responses,
type ResponseObject,
type ResponseObjectStream,
diff --git a/src/resources/responses/input-items.ts b/src/resources/responses/input-items.ts
index ff21948..74c556c 100644
--- a/src/resources/responses/input-items.ts
+++ b/src/resources/responses/input-items.ts
@@ -95,7 +95,39 @@ export namespace InputItemListResponse {
/**
* (Optional) Search results returned by the file search operation
*/
- results?: Array<{ [key: string]: boolean | number | string | Array<unknown> | unknown | null }>;
+ results?: Array<OpenAIResponseOutputMessageFileSearchToolCall.Result>;
+ }
+
+ export namespace OpenAIResponseOutputMessageFileSearchToolCall {
+ /**
+ * Search results returned by the file search operation.
+ */
+ export interface Result {
+ /**
+ * (Optional) Key-value attributes associated with the file
+ */
+ attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
+
+ /**
+ * Unique identifier of the file containing the result
+ */
+ file_id: string;
+
+ /**
+ * Name of the file containing the result
+ */
+ filename: string;
+
+ /**
+ * Relevance score for this search result (between 0 and 1)
+ */
+ score: number;
+
+ /**
+ * Text content of the search result
+ */
+ text: string;
+ }
}
/**
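The file search results above are now a concrete `Result` interface rather than a loose record, so consumers can read `score`/`text` without index-signature casts. A structural sketch (the local interface simply mirrors the generated shape to stay import-path agnostic):

```ts
// Mirrors the generated Result shape from this diff.
interface FileSearchResult {
  attributes: { [key: string]: unknown };
  file_id: string;
  filename: string;
  score: number;
  text: string;
}

// Pick the highest-scoring result; `score` is documented above as being between 0 and 1.
function topResult(results: Array<FileSearchResult>): FileSearchResult | undefined {
  return [...results].sort((a, b) => b.score - a.score)[0];
}
```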
diff --git a/src/resources/responses/responses.ts b/src/resources/responses/responses.ts
index f0d4d20..967846a 100644
--- a/src/resources/responses/responses.ts
+++ b/src/resources/responses/responses.ts
@@ -7,6 +7,7 @@ import * as Core from '../../core';
import * as ResponsesAPI from './responses';
import * as InputItemsAPI from './input-items';
import { InputItemListParams, InputItemListResponse, InputItems } from './input-items';
+import { OpenAICursorPage, type OpenAICursorPageParams } from '../../pagination';
import { Stream } from '../../streaming';
export class Responses extends APIResource {
@@ -45,19 +46,29 @@ export class Responses extends APIResource {
/**
* List all OpenAI responses.
*/
- list(query?: ResponseListParams, options?: Core.RequestOptions): Core.APIPromise<ResponseListResponse>;
- list(options?: Core.RequestOptions): Core.APIPromise<ResponseListResponse>;
+ list(
+ query?: ResponseListParams,
+ options?: Core.RequestOptions,
+ ): Core.PagePromise<ResponseListResponsesOpenAICursorPage, ResponseListResponse>;
+ list(
+ options?: Core.RequestOptions,
+ ): Core.PagePromise<ResponseListResponsesOpenAICursorPage, ResponseListResponse>;
list(
query: ResponseListParams | Core.RequestOptions = {},
options?: Core.RequestOptions,
- ): Core.APIPromise<ResponseListResponse> {
+ ): Core.PagePromise<ResponseListResponsesOpenAICursorPage, ResponseListResponse> {
if (isRequestOptions(query)) {
return this.list({}, query);
}
- return this._client.get('/v1/openai/v1/responses', { query, ...options });
+ return this._client.getAPIList('/v1/openai/v1/responses', ResponseListResponsesOpenAICursorPage, {
+ query,
+ ...options,
+ });
}
}
+export class ResponseListResponsesOpenAICursorPage extends OpenAICursorPage<ResponseListResponse> {}
+
/**
* Complete OpenAI response object containing generation results and metadata.
*/
@@ -340,7 +351,39 @@ export namespace ResponseObject {
/**
* (Optional) Search results returned by the file search operation
*/
- results?: Array<{ [key: string]: boolean | number | string | Array<unknown> | unknown | null }>;
+ results?: Array<OpenAIResponseOutputMessageFileSearchToolCall.Result>;
+ }
+
+ export namespace OpenAIResponseOutputMessageFileSearchToolCall {
+ /**
+ * Search results returned by the file search operation.
+ */
+ export interface Result {
+ /**
+ * (Optional) Key-value attributes associated with the file
+ */
+ attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
+
+ /**
+ * Unique identifier of the file containing the result
+ */
+ file_id: string;
+
+ /**
+ * Name of the file containing the result
+ */
+ filename: string;
+
+ /**
+ * Relevance score for this search result (between 0 and 1)
+ */
+ score: number;
+
+ /**
+ * Text content of the search result
+ */
+ text: string;
+ }
}
/**
@@ -801,7 +844,39 @@ export namespace ResponseObjectStream {
/**
* (Optional) Search results returned by the file search operation
*/
- results?: Array<{ [key: string]: boolean | number | string | Array<unknown> | unknown | null }>;
+ results?: Array<OpenAIResponseOutputMessageFileSearchToolCall.Result>;
+ }
+
+ export namespace OpenAIResponseOutputMessageFileSearchToolCall {
+ /**
+ * Search results returned by the file search operation.
+ */
+ export interface Result {
+ /**
+ * (Optional) Key-value attributes associated with the file
+ */
+ attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
+
+ /**
+ * Unique identifier of the file containing the result
+ */
+ file_id: string;
+
+ /**
+ * Name of the file containing the result
+ */
+ filename: string;
+
+ /**
+ * Relevance score for this search result (between 0 and 1)
+ */
+ score: number;
+
+ /**
+ * Text content of the search result
+ */
+ text: string;
+ }
}
/**
@@ -1163,7 +1238,39 @@ export namespace ResponseObjectStream {
/**
* (Optional) Search results returned by the file search operation
*/
- results?: Array<{ [key: string]: boolean | number | string | Array<unknown> | unknown | null }>;
+ results?: Array<OpenAIResponseOutputMessageFileSearchToolCall.Result>;
+ }
+
+ export namespace OpenAIResponseOutputMessageFileSearchToolCall {
+ /**
+ * Search results returned by the file search operation.
+ */
+ export interface Result {
+ /**
+ * (Optional) Key-value attributes associated with the file
+ */
+ attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
+
+ /**
+ * Unique identifier of the file containing the result
+ */
+ file_id: string;
+
+ /**
+ * Name of the file containing the result
+ */
+ filename: string;
+
+ /**
+ * Relevance score for this search result (between 0 and 1)
+ */
+ score: number;
+
+ /**
+ * Text content of the search result
+ */
+ text: string;
+ }
}
/**
@@ -1593,765 +1700,797 @@ export namespace ResponseObjectStream {
}
/**
- * Paginated list of OpenAI response objects with navigation metadata.
+ * OpenAI response object extended with input context information.
*/
export interface ResponseListResponse {
/**
- * List of response objects with their input context
+ * Unique identifier for this response
+ */
+ id: string;
+
+ /**
+ * Unix timestamp when the response was created
+ */
+ created_at: number;
+
+ /**
+ * List of input items that led to this response
+ */
+ input: Array<
+ | ResponseListResponse.OpenAIResponseOutputMessageWebSearchToolCall
+ | ResponseListResponse.OpenAIResponseOutputMessageFileSearchToolCall
+ | ResponseListResponse.OpenAIResponseOutputMessageFunctionToolCall
+ | ResponseListResponse.OpenAIResponseInputFunctionToolCallOutput
+ | ResponseListResponse.OpenAIResponseMessage
+ >;
+
+ /**
+ * Model identifier used for generation
+ */
+ model: string;
+
+ /**
+ * Object type identifier, always "response"
+ */
+ object: 'response';
+
+ /**
+ * List of generated output items (messages, tool calls, etc.)
+ */
+ output: Array<
+ | ResponseListResponse.OpenAIResponseMessage
+ | ResponseListResponse.OpenAIResponseOutputMessageWebSearchToolCall
+ | ResponseListResponse.OpenAIResponseOutputMessageFileSearchToolCall
+ | ResponseListResponse.OpenAIResponseOutputMessageFunctionToolCall
+ | ResponseListResponse.OpenAIResponseOutputMessageMcpCall
+ | ResponseListResponse.OpenAIResponseOutputMessageMcpListTools
+ >;
+
+ /**
+ * Whether tool calls can be executed in parallel
+ */
+ parallel_tool_calls: boolean;
+
+ /**
+ * Current status of the response generation
+ */
+ status: string;
+
+ /**
+ * Text formatting configuration for the response
+ */
+ text: ResponseListResponse.Text;
+
+ /**
+ * (Optional) Error details if the response generation failed
+ */
+ error?: ResponseListResponse.Error;
+
+ /**
+ * (Optional) ID of the previous response in a conversation
*/
- data: Array<ResponseListResponse.Data>;
+ previous_response_id?: string;
/**
- * Identifier of the first item in this page
+ * (Optional) Sampling temperature used for generation
*/
- first_id: string;
+ temperature?: number;
/**
- * Whether there are more results available beyond this page
+ * (Optional) Nucleus sampling parameter used for generation
*/
- has_more: boolean;
+ top_p?: number;
/**
- * Identifier of the last item in this page
+ * (Optional) Truncation strategy applied to the response
*/
- last_id: string;
+ truncation?: string;
/**
- * Object type identifier, always "list"
+ * (Optional) User identifier associated with the request
*/
- object: 'list';
+ user?: string;
}
export namespace ResponseListResponse {
/**
- * OpenAI response object extended with input context information.
+ * Web search tool call output message for OpenAI responses.
*/
- export interface Data {
+ export interface OpenAIResponseOutputMessageWebSearchToolCall {
/**
- * Unique identifier for this response
+ * Unique identifier for this tool call
*/
id: string;
/**
- * Unix timestamp when the response was created
+ * Current status of the web search operation
*/
- created_at: number;
+ status: string;
/**
- * List of input items that led to this response
+ * Tool call type identifier, always "web_search_call"
*/
- input: Array<
- | Data.OpenAIResponseOutputMessageWebSearchToolCall
- | Data.OpenAIResponseOutputMessageFileSearchToolCall
- | Data.OpenAIResponseOutputMessageFunctionToolCall
- | Data.OpenAIResponseInputFunctionToolCallOutput
- | Data.OpenAIResponseMessage
- >;
+ type: 'web_search_call';
+ }
+ /**
+ * File search tool call output message for OpenAI responses.
+ */
+ export interface OpenAIResponseOutputMessageFileSearchToolCall {
/**
- * Model identifier used for generation
+ * Unique identifier for this tool call
*/
- model: string;
+ id: string;
/**
- * Object type identifier, always "response"
+ * List of search queries executed
*/
- object: 'response';
- queries: Array<string>;
/**
- * List of generated output items (messages, tool calls, etc.)
+ * Current status of the file search operation
*/
- output: Array<
- | Data.OpenAIResponseMessage
- | Data.OpenAIResponseOutputMessageWebSearchToolCall
- | Data.OpenAIResponseOutputMessageFileSearchToolCall
- | Data.OpenAIResponseOutputMessageFunctionToolCall
- | Data.OpenAIResponseOutputMessageMcpCall
- | Data.OpenAIResponseOutputMessageMcpListTools
- >;
+ status: string;
/**
- * Whether tool calls can be executed in parallel
+ * Tool call type identifier, always "file_search_call"
*/
- parallel_tool_calls: boolean;
+ type: 'file_search_call';
/**
- * Current status of the response generation
+ * (Optional) Search results returned by the file search operation
*/
- status: string;
+ results?: Array<OpenAIResponseOutputMessageFileSearchToolCall.Result>;
+ }
+ export namespace OpenAIResponseOutputMessageFileSearchToolCall {
/**
- * Text formatting configuration for the response
+ * Search results returned by the file search operation.
*/
- text: Data.Text;
+ export interface Result {
+ /**
+ * (Optional) Key-value attributes associated with the file
+ */
+ attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
+
+ /**
+ * Unique identifier of the file containing the result
+ */
+ file_id: string;
+
+ /**
+ * Name of the file containing the result
+ */
+ filename: string;
+
+ /**
+ * Relevance score for this search result (between 0 and 1)
+ */
+ score: number;
+
+ /**
+ * Text content of the search result
+ */
+ text: string;
+ }
+ }
+ /**
+ * Function tool call output message for OpenAI responses.
+ */
+ export interface OpenAIResponseOutputMessageFunctionToolCall {
/**
- * (Optional) Error details if the response generation failed
+ * JSON string containing the function arguments
*/
- error?: Data.Error;
+ arguments: string;
/**
- * (Optional) ID of the previous response in a conversation
+ * Unique identifier for the function call
*/
- previous_response_id?: string;
+ call_id: string;
/**
- * (Optional) Sampling temperature used for generation
+ * Name of the function being called
*/
- temperature?: number;
+ name: string;
/**
- * (Optional) Nucleus sampling parameter used for generation
+ * Tool call type identifier, always "function_call"
*/
- top_p?: number;
+ type: 'function_call';
/**
- * (Optional) Truncation strategy applied to the response
+ * (Optional) Additional identifier for the tool call
*/
- truncation?: string;
+ id?: string;
/**
- * (Optional) User identifier associated with the request
+ * (Optional) Current status of the function call execution
*/
- user?: string;
+ status?: string;
+ }
+
+ /**
+ * This represents the output of a function call that gets passed back to the
+ * model.
+ */
+ export interface OpenAIResponseInputFunctionToolCallOutput {
+ call_id: string;
+
+ output: string;
+
+ type: 'function_call_output';
+
+ id?: string;
+
+ status?: string;
+ }
+
+ /**
+ * Corresponds to the various Message types in the Responses API. They are all
+ * under one type because the Responses API gives them all the same "type" value,
+ * and there is no way to tell them apart in certain scenarios.
+ */
+ export interface OpenAIResponseMessage {
+ content:
+ | string
+ | Array<
+ | OpenAIResponseMessage.OpenAIResponseInputMessageContentText
+ | OpenAIResponseMessage.OpenAIResponseInputMessageContentImage
+ >
+ | Array<OpenAIResponseMessage.UnionMember2>;
+
+ role: 'system' | 'developer' | 'user' | 'assistant';
+
+ type: 'message';
+
+ id?: string;
+
+ status?: string;
}
- export namespace Data {
+ export namespace OpenAIResponseMessage {
/**
- * Web search tool call output message for OpenAI responses.
+ * Text content for input messages in OpenAI response format.
*/
- export interface OpenAIResponseOutputMessageWebSearchToolCall {
- /**
- * Unique identifier for this tool call
- */
- id: string;
-
+ export interface OpenAIResponseInputMessageContentText {
/**
- * Current status of the web search operation
+ * The text content of the input message
*/
- status: string;
+ text: string;
/**
- * Tool call type identifier, always "web_search_call"
+ * Content type identifier, always "input_text"
*/
- type: 'web_search_call';
+ type: 'input_text';
}
/**
- * File search tool call output message for OpenAI responses.
+ * Image content for input messages in OpenAI response format.
*/
- export interface OpenAIResponseOutputMessageFileSearchToolCall {
+ export interface OpenAIResponseInputMessageContentImage {
/**
- * Unique identifier for this tool call
+ * Level of detail for image processing, can be "low", "high", or "auto"
*/
- id: string;
+ detail: 'low' | 'high' | 'auto';
/**
- * List of search queries executed
+ * Content type identifier, always "input_image"
*/
- queries: Array<string>;
+ type: 'input_image';
/**
- * Current status of the file search operation
+ * (Optional) URL of the image content
*/
- status: string;
-
- /**
- * Tool call type identifier, always "file_search_call"
- */
- type: 'file_search_call';
-
- /**
- * (Optional) Search results returned by the file search operation
- */
- results?: Array<{ [key: string]: boolean | number | string | Array<unknown> | unknown | null }>;
+ image_url?: string;
}
- /**
- * Function tool call output message for OpenAI responses.
- */
- export interface OpenAIResponseOutputMessageFunctionToolCall {
- /**
- * JSON string containing the function arguments
- */
- arguments: string;
-
- /**
- * Unique identifier for the function call
- */
- call_id: string;
-
- /**
- * Name of the function being called
- */
- name: string;
+ export interface UnionMember2 {
+ annotations: Array<
+ | UnionMember2.OpenAIResponseAnnotationFileCitation
+ | UnionMember2.OpenAIResponseAnnotationCitation
+ | UnionMember2.OpenAIResponseAnnotationContainerFileCitation
+ | UnionMember2.OpenAIResponseAnnotationFilePath
+ >;
- /**
- * Tool call type identifier, always "function_call"
- */
- type: 'function_call';
+ text: string;
- /**
- * (Optional) Additional identifier for the tool call
- */
- id?: string;
+ type: 'output_text';
+ }
+ export namespace UnionMember2 {
/**
- * (Optional) Current status of the function call execution
+ * File citation annotation for referencing specific files in response content.
*/
- status?: string;
- }
-
- /**
- * This represents the output of a function call that gets passed back to the
- * model.
- */
- export interface OpenAIResponseInputFunctionToolCallOutput {
- call_id: string;
-
- output: string;
-
- type: 'function_call_output';
-
- id?: string;
-
- status?: string;
- }
-
- /**
- * Corresponds to the various Message types in the Responses API. They are all
- * under one type because the Responses API gives them all the same "type" value,
- * and there is no way to tell them apart in certain scenarios.
- */
- export interface OpenAIResponseMessage {
- content:
- | string
- | Array<
- | OpenAIResponseMessage.OpenAIResponseInputMessageContentText
- | OpenAIResponseMessage.OpenAIResponseInputMessageContentImage
- >
- | Array<OpenAIResponseMessage.UnionMember2>;
-
- role: 'system' | 'developer' | 'user' | 'assistant';
-
- type: 'message';
-
- id?: string;
+ export interface OpenAIResponseAnnotationFileCitation {
+ /**
+ * Unique identifier of the referenced file
+ */
+ file_id: string;
- status?: string;
- }
+ /**
+ * Name of the referenced file
+ */
+ filename: string;
- export namespace OpenAIResponseMessage {
- /**
- * Text content for input messages in OpenAI response format.
- */
- export interface OpenAIResponseInputMessageContentText {
/**
- * The text content of the input message
+ * Position index of the citation within the content
*/
- text: string;
+ index: number;
/**
- * Content type identifier, always "input_text"
+ * Annotation type identifier, always "file_citation"
*/
- type: 'input_text';
+ type: 'file_citation';
}
/**
- * Image content for input messages in OpenAI response format.
+ * URL citation annotation for referencing external web resources.
*/
- export interface OpenAIResponseInputMessageContentImage {
+ export interface OpenAIResponseAnnotationCitation {
/**
- * Level of detail for image processing, can be "low", "high", or "auto"
+ * End position of the citation span in the content
*/
- detail: 'low' | 'high' | 'auto';
+ end_index: number;
/**
- * Content type identifier, always "input_image"
+ * Start position of the citation span in the content
*/
- type: 'input_image';
+ start_index: number;
/**
- * (Optional) URL of the image content
+ * Title of the referenced web resource
*/
- image_url?: string;
- }
-
- export interface UnionMember2 {
- annotations: Array<
- | UnionMember2.OpenAIResponseAnnotationFileCitation
- | UnionMember2.OpenAIResponseAnnotationCitation
- | UnionMember2.OpenAIResponseAnnotationContainerFileCitation
- | UnionMember2.OpenAIResponseAnnotationFilePath
- >;
-
- text: string;
-
- type: 'output_text';
- }
+ title: string;
- export namespace UnionMember2 {
/**
- * File citation annotation for referencing specific files in response content.
+ * Annotation type identifier, always "url_citation"
*/
- export interface OpenAIResponseAnnotationFileCitation {
- /**
- * Unique identifier of the referenced file
- */
- file_id: string;
+ type: 'url_citation';
- /**
- * Name of the referenced file
- */
- filename: string;
+ /**
+ * URL of the referenced web resource
+ */
+ url: string;
+ }
- /**
- * Position index of the citation within the content
- */
- index: number;
+ export interface OpenAIResponseAnnotationContainerFileCitation {
+ container_id: string;
- /**
- * Annotation type identifier, always "file_citation"
- */
- type: 'file_citation';
- }
+ end_index: number;
- /**
- * URL citation annotation for referencing external web resources.
- */
- export interface OpenAIResponseAnnotationCitation {
- /**
- * End position of the citation span in the content
- */
- end_index: number;
+ file_id: string;
- /**
- * Start position of the citation span in the content
- */
- start_index: number;
+ filename: string;
- /**
- * Title of the referenced web resource
- */
- title: string;
+ start_index: number;
- /**
- * Annotation type identifier, always "url_citation"
- */
- type: 'url_citation';
+ type: 'container_file_citation';
+ }
- /**
- * URL of the referenced web resource
- */
- url: string;
- }
+ export interface OpenAIResponseAnnotationFilePath {
+ file_id: string;
- export interface OpenAIResponseAnnotationContainerFileCitation {
- container_id: string;
+ index: number;
- end_index: number;
+ type: 'file_path';
+ }
+ }
+ }
- file_id: string;
+ /**
+ * Corresponds to the various Message types in the Responses API. They are all
+ * under one type because the Responses API gives them all the same "type" value,
+ * and there is no way to tell them apart in certain scenarios.
+ */
+ export interface OpenAIResponseMessage {
+ content:
+ | string
+ | Array<
+ | OpenAIResponseMessage.OpenAIResponseInputMessageContentText
+ | OpenAIResponseMessage.OpenAIResponseInputMessageContentImage
+ >
+ | Array<OpenAIResponseMessage.UnionMember2>;
- filename: string;
+ role: 'system' | 'developer' | 'user' | 'assistant';
- start_index: number;
+ type: 'message';
- type: 'container_file_citation';
- }
+ id?: string;
- export interface OpenAIResponseAnnotationFilePath {
- file_id: string;
+ status?: string;
+ }
- index: number;
+ export namespace OpenAIResponseMessage {
+ /**
+ * Text content for input messages in OpenAI response format.
+ */
+ export interface OpenAIResponseInputMessageContentText {
+ /**
+ * The text content of the input message
+ */
+ text: string;
- type: 'file_path';
- }
- }
+ /**
+ * Content type identifier, always "input_text"
+ */
+ type: 'input_text';
}
/**
- * Corresponds to the various Message types in the Responses API. They are all
- * under one type because the Responses API gives them all the same "type" value,
- * and there is no way to tell them apart in certain scenarios.
- */
- export interface OpenAIResponseMessage {
- content:
- | string
- | Array<
- | OpenAIResponseMessage.OpenAIResponseInputMessageContentText
- | OpenAIResponseMessage.OpenAIResponseInputMessageContentImage
- >
- | Array<OpenAIResponseMessage.UnionMember2>;
+ * Image content for input messages in OpenAI response format.
+ */
+ export interface OpenAIResponseInputMessageContentImage {
+ /**
+ * Level of detail for image processing, can be "low", "high", or "auto"
+ */
+ detail: 'low' | 'high' | 'auto';
- role: 'system' | 'developer' | 'user' | 'assistant';
+ /**
+ * Content type identifier, always "input_image"
+ */
+ type: 'input_image';
- type: 'message';
+ /**
+ * (Optional) URL of the image content
+ */
+ image_url?: string;
+ }
- id?: string;
+ export interface UnionMember2 {
+ annotations: Array<
+ | UnionMember2.OpenAIResponseAnnotationFileCitation
+ | UnionMember2.OpenAIResponseAnnotationCitation
+ | UnionMember2.OpenAIResponseAnnotationContainerFileCitation
+ | UnionMember2.OpenAIResponseAnnotationFilePath
+ >;
- status?: string;
+ text: string;
+
+ type: 'output_text';
}
- export namespace OpenAIResponseMessage {
+ export namespace UnionMember2 {
/**
- * Text content for input messages in OpenAI response format.
+ * File citation annotation for referencing specific files in response content.
*/
- export interface OpenAIResponseInputMessageContentText {
+ export interface OpenAIResponseAnnotationFileCitation {
/**
- * The text content of the input message
+ * Unique identifier of the referenced file
*/
- text: string;
+ file_id: string;
/**
- * Content type identifier, always "input_text"
+ * Name of the referenced file
*/
- type: 'input_text';
+ filename: string;
+
+ /**
+ * Position index of the citation within the content
+ */
+ index: number;
+
+ /**
+ * Annotation type identifier, always "file_citation"
+ */
+ type: 'file_citation';
}
/**
- * Image content for input messages in OpenAI response format.
+ * URL citation annotation for referencing external web resources.
*/
- export interface OpenAIResponseInputMessageContentImage {
+ export interface OpenAIResponseAnnotationCitation {
/**
- * Level of detail for image processing, can be "low", "high", or "auto"
+ * End position of the citation span in the content
*/
- detail: 'low' | 'high' | 'auto';
+ end_index: number;
/**
- * Content type identifier, always "input_image"
+ * Start position of the citation span in the content
*/
- type: 'input_image';
+ start_index: number;
/**
- * (Optional) URL of the image content
+ * Title of the referenced web resource
*/
- image_url?: string;
- }
-
- export interface UnionMember2 {
- annotations: Array<
- | UnionMember2.OpenAIResponseAnnotationFileCitation
- | UnionMember2.OpenAIResponseAnnotationCitation
- | UnionMember2.OpenAIResponseAnnotationContainerFileCitation
- | UnionMember2.OpenAIResponseAnnotationFilePath
- >;
-
- text: string;
+ title: string;
- type: 'output_text';
- }
+ /**
+ * Annotation type identifier, always "url_citation"
+ */
+ type: 'url_citation';
- export namespace UnionMember2 {
/**
- * File citation annotation for referencing specific files in response content.
+ * URL of the referenced web resource
*/
- export interface OpenAIResponseAnnotationFileCitation {
- /**
- * Unique identifier of the referenced file
- */
- file_id: string;
+ url: string;
+ }
- /**
- * Name of the referenced file
- */
- filename: string;
+ export interface OpenAIResponseAnnotationContainerFileCitation {
+ container_id: string;
- /**
- * Position index of the citation within the content
- */
- index: number;
+ end_index: number;
- /**
- * Annotation type identifier, always "file_citation"
- */
- type: 'file_citation';
- }
+ file_id: string;
- /**
- * URL citation annotation for referencing external web resources.
- */
- export interface OpenAIResponseAnnotationCitation {
- /**
- * End position of the citation span in the content
- */
- end_index: number;
+ filename: string;
- /**
- * Start position of the citation span in the content
- */
- start_index: number;
+ start_index: number;
- /**
- * Title of the referenced web resource
- */
- title: string;
+ type: 'container_file_citation';
+ }
- /**
- * Annotation type identifier, always "url_citation"
- */
- type: 'url_citation';
+ export interface OpenAIResponseAnnotationFilePath {
+ file_id: string;
- /**
- * URL of the referenced web resource
- */
- url: string;
- }
+ index: number;
- export interface OpenAIResponseAnnotationContainerFileCitation {
- container_id: string;
+ type: 'file_path';
+ }
+ }
+ }
- end_index: number;
+ /**
+ * Web search tool call output message for OpenAI responses.
+ */
+ export interface OpenAIResponseOutputMessageWebSearchToolCall {
+ /**
+ * Unique identifier for this tool call
+ */
+ id: string;
- file_id: string;
+ /**
+ * Current status of the web search operation
+ */
+ status: string;
- filename: string;
+ /**
+ * Tool call type identifier, always "web_search_call"
+ */
+ type: 'web_search_call';
+ }
- start_index: number;
+ /**
+ * File search tool call output message for OpenAI responses.
+ */
+ export interface OpenAIResponseOutputMessageFileSearchToolCall {
+ /**
+ * Unique identifier for this tool call
+ */
+ id: string;
- type: 'container_file_citation';
- }
+ /**
+ * List of search queries executed
+ */
+ queries: Array<string>;
- export interface OpenAIResponseAnnotationFilePath {
- file_id: string;
+ /**
+ * Current status of the file search operation
+ */
+ status: string;
- index: number;
+ /**
+ * Tool call type identifier, always "file_search_call"
+ */
+ type: 'file_search_call';
- type: 'file_path';
- }
- }
- }
+ /**
+ * (Optional) Search results returned by the file search operation
+ */
+ results?: Array<OpenAIResponseOutputMessageFileSearchToolCall.Result>;
+ }
+ export namespace OpenAIResponseOutputMessageFileSearchToolCall {
/**
- * Web search tool call output message for OpenAI responses.
+ * Search results returned by the file search operation.
*/
- export interface OpenAIResponseOutputMessageWebSearchToolCall {
+ export interface Result {
/**
- * Unique identifier for this tool call
+ * (Optional) Key-value attributes associated with the file
*/
- id: string;
+ attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
/**
- * Current status of the web search operation
+ * Unique identifier of the file containing the result
*/
- status: string;
+ file_id: string;
/**
- * Tool call type identifier, always "web_search_call"
+ * Name of the file containing the result
*/
- type: 'web_search_call';
+ filename: string;
+
+ /**
+ * Relevance score for this search result (between 0 and 1)
+ */
+ score: number;
+
+ /**
+ * Text content of the search result
+ */
+ text: string;
}
+ }
+
+ /**
+ * Function tool call output message for OpenAI responses.
+ */
+ export interface OpenAIResponseOutputMessageFunctionToolCall {
+ /**
+ * JSON string containing the function arguments
+ */
+ arguments: string;
+
+ /**
+ * Unique identifier for the function call
+ */
+ call_id: string;
+
+ /**
+ * Name of the function being called
+ */
+ name: string;
+
+ /**
+ * Tool call type identifier, always "function_call"
+ */
+ type: 'function_call';
+
+ /**
+ * (Optional) Additional identifier for the tool call
+ */
+ id?: string;
+
+ /**
+ * (Optional) Current status of the function call execution
+ */
+ status?: string;
+ }
+ /**
+ * Model Context Protocol (MCP) call output message for OpenAI responses.
+ */
+ export interface OpenAIResponseOutputMessageMcpCall {
/**
- * File search tool call output message for OpenAI responses.
+ * Unique identifier for this MCP call
*/
- export interface OpenAIResponseOutputMessageFileSearchToolCall {
- /**
- * Unique identifier for this tool call
- */
- id: string;
-
- /**
- * List of search queries executed
- */
- queries: Array<string>;
+ id: string;
- /**
- * Current status of the file search operation
- */
- status: string;
+ /**
+ * JSON string containing the MCP call arguments
+ */
+ arguments: string;
- /**
- * Tool call type identifier, always "file_search_call"
- */
- type: 'file_search_call';
+ /**
+ * Name of the MCP method being called
+ */
+ name: string;
- /**
- * (Optional) Search results returned by the file search operation
- */
- results?: Array<{ [key: string]: boolean | number | string | Array<unknown> | unknown | null }>;
- }
+ /**
+ * Label identifying the MCP server handling the call
+ */
+ server_label: string;
/**
- * Function tool call output message for OpenAI responses.
+ * Tool call type identifier, always "mcp_call"
*/
- export interface OpenAIResponseOutputMessageFunctionToolCall {
- /**
- * JSON string containing the function arguments
- */
- arguments: string;
+ type: 'mcp_call';
- /**
- * Unique identifier for the function call
- */
- call_id: string;
+ /**
+ * (Optional) Error message if the MCP call failed
+ */
+ error?: string;
- /**
- * Name of the function being called
- */
- name: string;
+ /**
+ * (Optional) Output result from the successful MCP call
+ */
+ output?: string;
+ }
- /**
- * Tool call type identifier, always "function_call"
- */
- type: 'function_call';
+ /**
+ * MCP list tools output message containing available tools from an MCP server.
+ */
+ export interface OpenAIResponseOutputMessageMcpListTools {
+ /**
+ * Unique identifier for this MCP list tools operation
+ */
+ id: string;
- /**
- * (Optional) Additional identifier for the tool call
- */
- id?: string;
+ /**
+ * Label identifying the MCP server providing the tools
+ */
+ server_label: string;
- /**
- * (Optional) Current status of the function call execution
- */
- status?: string;
- }
+ /**
+ * List of available tools provided by the MCP server
+ */
+ tools: Array<OpenAIResponseOutputMessageMcpListTools.Tool>;
/**
- * Model Context Protocol (MCP) call output message for OpenAI responses.
+ * Tool call type identifier, always "mcp_list_tools"
*/
- export interface OpenAIResponseOutputMessageMcpCall {
- /**
- * Unique identifier for this MCP call
- */
- id: string;
+ type: 'mcp_list_tools';
+ }
+ export namespace OpenAIResponseOutputMessageMcpListTools {
+ /**
+ * Tool definition returned by MCP list tools operation.
+ */
+ export interface Tool {
/**
- * JSON string containing the MCP call arguments
+ * JSON schema defining the tool's input parameters
*/
- arguments: string;
+ input_schema: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
/**
- * Name of the MCP method being called
+ * Name of the tool
*/
name: string;
/**
- * Label identifying the MCP server handling the call
- */
- server_label: string;
-
- /**
- * Tool call type identifier, always "mcp_call"
- */
- type: 'mcp_call';
-
- /**
- * (Optional) Error message if the MCP call failed
- */
- error?: string;
-
- /**
- * (Optional) Output result from the successful MCP call
+ * (Optional) Description of what the tool does
*/
- output?: string;
+ description?: string;
}
+ }
+ /**
+ * Text formatting configuration for the response
+ */
+ export interface Text {
/**
- * MCP list tools output message containing available tools from an MCP server.
+ * (Optional) Text format configuration specifying output format requirements
*/
- export interface OpenAIResponseOutputMessageMcpListTools {
+ format?: Text.Format;
+ }
+
+ export namespace Text {
+ /**
+ * (Optional) Text format configuration specifying output format requirements
+ */
+ export interface Format {
/**
- * Unique identifier for this MCP list tools operation
+ * Must be "text", "json_schema", or "json_object" to identify the format type
*/
- id: string;
+ type: 'text' | 'json_schema' | 'json_object';
/**
- * Label identifying the MCP server providing the tools
+ * (Optional) A description of the response format. Only used for json_schema.
*/
- server_label: string;
+ description?: string;
/**
- * List of available tools provided by the MCP server
+ * The name of the response format. Only used for json_schema.
*/
- tools: Array<OpenAIResponseOutputMessageMcpListTools.Tool>;
+ name?: string;
/**
- * Tool call type identifier, always "mcp_list_tools"
+ * The JSON schema the response should conform to. In a Python SDK, this is often a
+ * `pydantic` model. Only used for json_schema.
*/
- type: 'mcp_list_tools';
- }
+ schema?: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
- export namespace OpenAIResponseOutputMessageMcpListTools {
/**
- * Tool definition returned by MCP list tools operation.
+ * (Optional) Whether to strictly enforce the JSON schema. If true, the response
+ * must match the schema exactly. Only used for json_schema.
*/
- export interface Tool {
- /**
- * JSON schema defining the tool's input parameters
- */
- input_schema: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
-
- /**
- * Name of the tool
- */
- name: string;
-
- /**
- * (Optional) Description of what the tool does
- */
- description?: string;
- }
+ strict?: boolean;
}
+ }
+ /**
+ * (Optional) Error details if the response generation failed
+ */
+ export interface Error {
/**
- * Text formatting configuration for the response
+ * Error code identifying the type of failure
*/
- export interface Text {
- /**
- * (Optional) Text format configuration specifying output format requirements
- */
- format?: Text.Format;
- }
-
- export namespace Text {
- /**
- * (Optional) Text format configuration specifying output format requirements
- */
- export interface Format {
- /**
- * Must be "text", "json_schema", or "json_object" to identify the format type
- */
- type: 'text' | 'json_schema' | 'json_object';
-
- /**
- * (Optional) A description of the response format. Only used for json_schema.
- */
- description?: string;
-
- /**
- * The name of the response format. Only used for json_schema.
- */
- name?: string;
-
- /**
- * The JSON schema the response should conform to. In a Python SDK, this is often a
- * `pydantic` model. Only used for json_schema.
- */
- schema?: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
-
- /**
- * (Optional) Whether to strictly enforce the JSON schema. If true, the response
- * must match the schema exactly. Only used for json_schema.
- */
- strict?: boolean;
- }
- }
+ code: string;
/**
- * (Optional) Error details if the response generation failed
+ * Human-readable error message describing the failure
*/
- export interface Error {
- /**
- * Error code identifying the type of failure
- */
- code: string;
-
- /**
- * Human-readable error message describing the failure
- */
- message: string;
- }
+ message: string;
}
}
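The reshaped ResponseListResponse above is a discriminated union at every level: each output item carries a literal `type` field, so callers can narrow it with an ordinary `switch`. A minimal sketch, not taken from this diff; the subpath import is an assumption based on the `src/resources` layout, and the field accesses follow the interfaces shown above.

import type { ResponseListResponse } from 'llama-stack-client/resources/responses/responses';

// Walk the generated output items and branch on the literal `type` discriminator.
function describeOutput(response: ResponseListResponse): void {
  for (const item of response.output) {
    switch (item.type) {
      case 'message':
        // OpenAIResponseMessage: content is a string or an array of content parts.
        console.log('message from', item.role, ':', item.content);
        break;
      case 'function_call':
        console.log(`function ${item.name} called with`, item.arguments);
        break;
      case 'file_search_call':
        // `results` is optional; each Result carries file_id, filename, score, text.
        console.log('file search', item.status, 'results:', item.results?.length ?? 0);
        break;
      default:
        console.log('other output item:', item.type);
    }
  }
}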
@@ -2376,6 +2515,11 @@ export interface ResponseCreateParamsBase {
*/
model: string;
+ /**
+ * (Optional) Additional fields to include in the response.
+ */
+ include?: Array<string>;
+
instructions?: string;
max_infer_iters?: number;
@@ -2454,7 +2598,39 @@ export namespace ResponseCreateParams {
/**
* (Optional) Search results returned by the file search operation
*/
- results?: Array<{ [key: string]: boolean | number | string | Array<unknown> | unknown | null }>;
+ results?: Array<OpenAIResponseOutputMessageFileSearchToolCall.Result>;
+ }
+
+ export namespace OpenAIResponseOutputMessageFileSearchToolCall {
+ /**
+ * Search results returned by the file search operation.
+ */
+ export interface Result {
+ /**
+ * (Optional) Key-value attributes associated with the file
+ */
+ attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
+
+ /**
+ * Unique identifier of the file containing the result
+ */
+ file_id: string;
+
+ /**
+ * Name of the file containing the result
+ */
+ filename: string;
+
+ /**
+ * Relevance score for this search result (between 0 and 1)
+ */
+ score: number;
+
+ /**
+ * Text content of the search result
+ */
+ text: string;
+ }
}
/**
@@ -2870,17 +3046,7 @@ export interface ResponseCreateParamsStreaming extends ResponseCreateParamsBase
stream: true;
}
-export interface ResponseListParams {
- /**
- * The ID of the last response to return.
- */
- after?: string;
-
- /**
- * The number of responses to return.
- */
- limit?: number;
-
+export interface ResponseListParams extends OpenAICursorPageParams {
/**
* The model to filter responses by.
*/
@@ -2892,6 +3058,7 @@ export interface ResponseListParams {
order?: 'asc' | 'desc';
}
+Responses.ResponseListResponsesOpenAICursorPage = ResponseListResponsesOpenAICursorPage;
Responses.InputItems = InputItems;
export declare namespace Responses {
@@ -2899,6 +3066,7 @@ export declare namespace Responses {
type ResponseObject as ResponseObject,
type ResponseObjectStream as ResponseObjectStream,
type ResponseListResponse as ResponseListResponse,
+ ResponseListResponsesOpenAICursorPage as ResponseListResponsesOpenAICursorPage,
type ResponseCreateParams as ResponseCreateParams,
type ResponseCreateParamsNonStreaming as ResponseCreateParamsNonStreaming,
type ResponseCreateParamsStreaming as ResponseCreateParamsStreaming,
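With `responses.list()` now returning a `ResponseListResponsesOpenAICursorPage`, the usual cursor-page helpers apply. A usage sketch, not taken from this diff: the default client export, the placeholder base URL, and the `data`/`hasNextPage`/`getNextPage` helpers are assumptions based on the SDK's standard pagination classes.

import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // placeholder URL

async function main() {
  // Auto-pagination: iterate items across pages; `limit` comes from OpenAICursorPageParams.
  for await (const response of client.responses.list({ limit: 20, order: 'desc' })) {
    console.log(response.id, response.model, response.status);
  }

  // Manual pagination: walk one page at a time via the cursor helpers.
  let page = await client.responses.list({ limit: 20 });
  while (true) {
    for (const response of page.data) console.log(response.created_at, response.id);
    if (!page.hasNextPage()) break;
    page = await page.getNextPage();
  }
}

main();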
diff --git a/src/resources/vector-stores/files.ts b/src/resources/vector-stores/files.ts
index a0cbdab..bc950cc 100644
--- a/src/resources/vector-stores/files.ts
+++ b/src/resources/vector-stores/files.ts
@@ -3,6 +3,7 @@
import { APIResource } from '../../resource';
import { isRequestOptions } from '../../core';
import * as Core from '../../core';
+import { OpenAICursorPage, type OpenAICursorPageParams } from '../../pagination';
export class Files extends APIResource {
/**
@@ -49,17 +50,24 @@ export class Files extends APIResource {
vectorStoreId: string,
query?: FileListParams,
options?: Core.RequestOptions,
- ): Core.APIPromise<FileListResponse>;
- list(vectorStoreId: string, options?: Core.RequestOptions): Core.APIPromise<FileListResponse>;
+ ): Core.PagePromise<VectorStoreFilesOpenAICursorPage, VectorStoreFile>;
+ list(
+ vectorStoreId: string,
+ options?: Core.RequestOptions,
+ ): Core.PagePromise<VectorStoreFilesOpenAICursorPage, VectorStoreFile>;
list(
vectorStoreId: string,
query: FileListParams | Core.RequestOptions = {},
options?: Core.RequestOptions,
- ): Core.APIPromise<FileListResponse> {
+ ): Core.PagePromise<VectorStoreFilesOpenAICursorPage, VectorStoreFile> {
if (isRequestOptions(query)) {
return this.list(vectorStoreId, {}, query);
}
- return this._client.get(`/v1/openai/v1/vector_stores/${vectorStoreId}/files`, { query, ...options });
+ return this._client.getAPIList(
+ `/v1/openai/v1/vector_stores/${vectorStoreId}/files`,
+ VectorStoreFilesOpenAICursorPage,
+ { query, ...options },
+ );
}
/**
@@ -85,6 +93,8 @@ export class Files extends APIResource {
}
}
+export class VectorStoreFilesOpenAICursorPage extends OpenAICursorPage<VectorStoreFile> {}
+
/**
* OpenAI Vector Store File object.
*/
@@ -196,36 +206,6 @@ export namespace VectorStoreFile {
}
}
-/**
- * Response from listing files in a vector store.
- */
-export interface FileListResponse {
- /**
- * List of vector store file objects
- */
- data: Array<VectorStoreFile>;
-
- /**
- * Whether there are more files available beyond this page
- */
- has_more: boolean;
-
- /**
- * Object type identifier, always "list"
- */
- object: string;
-
- /**
- * (Optional) ID of the first file in the list for pagination
- */
- first_id?: string;
-
- /**
- * (Optional) ID of the last file in the list for pagination
- */
- last_id?: string;
-}
-
/**
* Response from deleting a vector store file.
*/
@@ -358,13 +338,7 @@ export interface FileUpdateParams {
attributes: { [key: string]: boolean | number | string | Array<unknown> | unknown | null };
}
-export interface FileListParams {
- /**
- * (Optional) A cursor for use in pagination. `after` is an object ID that defines
- * your place in the list.
- */
- after?: string;
-
+export interface FileListParams extends OpenAICursorPageParams {
/**
* (Optional) A cursor for use in pagination. `before` is an object ID that defines
* your place in the list.
@@ -376,12 +350,6 @@ export interface FileListParams {
*/
filter?: 'completed' | 'in_progress' | 'cancelled' | 'failed';
- /**
- * (Optional) A limit on the number of objects to be returned. Limit can range
- * between 1 and 100, and the default is 20.
- */
- limit?: number;
-
/**
* (Optional) Sort order by the `created_at` timestamp of the objects. `asc` for
* ascending order and `desc` for descending order.
@@ -389,12 +357,14 @@ export interface FileListParams {
order?: string;
}
+Files.VectorStoreFilesOpenAICursorPage = VectorStoreFilesOpenAICursorPage;
+
export declare namespace Files {
export {
type VectorStoreFile as VectorStoreFile,
- type FileListResponse as FileListResponse,
type FileDeleteResponse as FileDeleteResponse,
type FileContentResponse as FileContentResponse,
+ VectorStoreFilesOpenAICursorPage as VectorStoreFilesOpenAICursorPage,
type FileCreateParams as FileCreateParams,
type FileUpdateParams as FileUpdateParams,
type FileListParams as FileListParams,
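Since `files.list()` now returns a `VectorStoreFilesOpenAICursorPage`, the per-page plumbing moves into the shared pagination class. A sketch under the same assumptions as above; the `client.vectorStores.files` accessor follows the api.md naming conventions, and the vector store ID is a placeholder.

import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // placeholder URL

async function listCompletedFiles(vectorStoreId: string) {
  // `after`/`limit` now come from OpenAICursorPageParams; `filter` and `order` are unchanged.
  for await (const file of client.vectorStores.files.list(vectorStoreId, {
    limit: 50,
    filter: 'completed',
    order: 'desc',
  })) {
    console.log(file.id, file.status);
  }
}

listCompletedFiles('vs_123'); // placeholder ID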
diff --git a/src/resources/vector-stores/index.ts b/src/resources/vector-stores/index.ts
index d4d883a..4b35bbb 100644
--- a/src/resources/vector-stores/index.ts
+++ b/src/resources/vector-stores/index.ts
@@ -1,9 +1,9 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export {
+ VectorStoreFilesOpenAICursorPage,
Files,
type VectorStoreFile,
- type FileListResponse,
type FileDeleteResponse,
type FileContentResponse,
type FileCreateParams,
@@ -11,6 +11,7 @@ export {
type FileListParams,
} from './files';
export {
+ VectorStoresOpenAICursorPage,
VectorStores,
type ListVectorStoresResponse,
type VectorStore,
diff --git a/src/resources/vector-stores/vector-stores.ts b/src/resources/vector-stores/vector-stores.ts
index 90c672d..e8994e2 100644
--- a/src/resources/vector-stores/vector-stores.ts
+++ b/src/resources/vector-stores/vector-stores.ts
@@ -9,11 +9,12 @@ import {
FileCreateParams,
FileDeleteResponse,
FileListParams,
- FileListResponse,
FileUpdateParams,
Files,
VectorStoreFile,
+ VectorStoreFilesOpenAICursorPage,
} from './files';
+import { OpenAICursorPage, type OpenAICursorPageParams } from '../../pagination';
export class VectorStores extends APIResource {
files: FilesAPI.Files = new FilesAPI.Files(this._client);
@@ -49,16 +50,19 @@ export class VectorStores extends APIResource {
list(
query?: VectorStoreListParams,
options?: Core.RequestOptions,
- ): Core.APIPromise<ListVectorStoresResponse>;
- list(options?: Core.RequestOptions): Core.APIPromise<ListVectorStoresResponse>;
+ ): Core.PagePromise<VectorStoresOpenAICursorPage, VectorStore>;
+ list(options?: Core.RequestOptions): Core.PagePromise<VectorStoresOpenAICursorPage, VectorStore>;
list(
query: VectorStoreListParams | Core.RequestOptions = {},
options?: Core.RequestOptions,
- ): Core.APIPromise<ListVectorStoresResponse> {
+ ): Core.PagePromise<VectorStoresOpenAICursorPage, VectorStore> {
if (isRequestOptions(query)) {
return this.list({}, query);
}
- return this._client.get('/v1/openai/v1/vector_stores', { query, ...options });
+ return this._client.getAPIList('/v1/openai/v1/vector_stores', VectorStoresOpenAICursorPage, {
+ query,
+ ...options,
+ });
}
/**
@@ -81,6 +85,8 @@ export class VectorStores extends APIResource {
}
}
+export class VectorStoresOpenAICursorPage extends OpenAICursorPage<VectorStore> {}
+
/**
* Response from listing vector stores.
*/
@@ -363,25 +369,13 @@ export interface VectorStoreUpdateParams {
name?: string;
}
-export interface VectorStoreListParams {
- /**
- * A cursor for use in pagination. `after` is an object ID that defines your place
- * in the list.
- */
- after?: string;
-
+export interface VectorStoreListParams extends OpenAICursorPageParams {
/**
* A cursor for use in pagination. `before` is an object ID that defines your place
* in the list.
*/
before?: string;
- /**
- * A limit on the number of objects to be returned. Limit can range between 1 and
- * 100, and the default is 20.
- */
- limit?: number;
-
/**
* Sort order by the `created_at` timestamp of the objects. `asc` for ascending
* order and `desc` for descending order.
@@ -438,7 +432,9 @@ export namespace VectorStoreSearchParams {
}
}
+VectorStores.VectorStoresOpenAICursorPage = VectorStoresOpenAICursorPage;
VectorStores.Files = Files;
+VectorStores.VectorStoreFilesOpenAICursorPage = VectorStoreFilesOpenAICursorPage;
export declare namespace VectorStores {
export {
@@ -446,6 +442,7 @@ export declare namespace VectorStores {
type VectorStore as VectorStore,
type VectorStoreDeleteResponse as VectorStoreDeleteResponse,
type VectorStoreSearchResponse as VectorStoreSearchResponse,
+ VectorStoresOpenAICursorPage as VectorStoresOpenAICursorPage,
type VectorStoreCreateParams as VectorStoreCreateParams,
type VectorStoreUpdateParams as VectorStoreUpdateParams,
type VectorStoreListParams as VectorStoreListParams,
@@ -455,9 +452,9 @@ export declare namespace VectorStores {
export {
Files as Files,
type VectorStoreFile as VectorStoreFile,
- type FileListResponse as FileListResponse,
type FileDeleteResponse as FileDeleteResponse,
type FileContentResponse as FileContentResponse,
+ VectorStoreFilesOpenAICursorPage as VectorStoreFilesOpenAICursorPage,
type FileCreateParams as FileCreateParams,
type FileUpdateParams as FileUpdateParams,
type FileListParams as FileListParams,
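`vectorStores.list()` follows the same pattern via `VectorStoresOpenAICursorPage`. A sketch of manual page walking, with the same assumed client setup and page helper names as the earlier examples.

import LlamaStackClient from 'llama-stack-client';

const client = new LlamaStackClient({ baseURL: 'http://localhost:8321' }); // placeholder URL

async function countVectorStores(): Promise<number> {
  let count = 0;
  // `limit` is inherited from OpenAICursorPageParams; `before`/`order` stay on VectorStoreListParams.
  let page = await client.vectorStores.list({ limit: 100 });
  while (true) {
    count += page.data.length;
    if (!page.hasNextPage()) break;
    page = await page.getNextPage();
  }
  return count;
}

countVectorStores().then((n) => console.log(`${n} vector stores`));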
diff --git a/src/version.ts b/src/version.ts
index df384a5..88f4d40 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const VERSION = '0.2.17'; // x-release-please-version
+export const VERSION = '0.3.0'; // x-release-please-version
diff --git a/tests/api-resources/responses/responses.test.ts b/tests/api-resources/responses/responses.test.ts
index 79575ae..3f14892 100644
--- a/tests/api-resources/responses/responses.test.ts
+++ b/tests/api-resources/responses/responses.test.ts
@@ -21,6 +21,7 @@ describe('resource responses', () => {
const response = await client.responses.create({
input: 'string',
model: 'model',
+ include: ['string'],
instructions: 'instructions',
max_infer_iters: 0,
previous_response_id: 'previous_response_id',