From 438573d38c1dd202636bce1b30e52bc139089c5f Mon Sep 17 00:00:00 2001 From: Liam Thompson Date: Mon, 18 Aug 2025 18:41:05 +0200 Subject: [PATCH] [DOCS] Remove instances of added in 0.0.0 + unrelated randomness --- docs/examples/languageExamples.json | 316 ++++++++++++++++- output/openapi/elasticsearch-openapi.json | 20 +- output/schema/schema.json | 323 ++++++++++++++++-- .../request/PutAi21RequestExample2.yaml | 4 +- .../PutAmazonSageMakerRequestExample1.yaml | 4 +- .../PutAmazonSageMakerRequestExample3.yaml | 4 +- .../PutAmazonSageMakerRequestExample4.yaml | 4 +- .../request/PutCustomRequestExample5.yaml | 4 +- .../snapshot/create/SnapshotCreateRequest.ts | 2 +- .../SnapshotCreateRepositoryRequest.ts | 2 +- .../SnapshotDeleteRepositoryRequest.ts | 2 +- .../snapshot/get/SnapshotGetRequest.ts | 2 +- .../SnapshotGetRepositoryRequest.ts | 2 +- .../restore/SnapshotRestoreRequest.ts | 2 +- .../SnapshotVerifyRepositoryRequest.ts | 2 +- 15 files changed, 634 insertions(+), 59 deletions(-) diff --git a/docs/examples/languageExamples.json b/docs/examples/languageExamples.json index 4bc38bc692..52cdfb6409 100644 --- a/docs/examples/languageExamples.json +++ b/docs/examples/languageExamples.json @@ -3048,23 +3048,23 @@ "specification/indices/clone/examples/request/indicesCloneRequestExample1.yaml": [ { "language": "Python", - "code": "resp = client.indices.clone(\n index=\"my_source_index\",\n target=\"my_target_index\",\n settings={\n \"index.number_of_shards\": 5\n },\n aliases={\n \"my_search_indices\": {}\n },\n)" + "code": "resp = client.indices.clone(\n index=\"my_source_index\",\n target=\"my_target_index\",\n settings={\n \"index.refresh_interval\": \"2s\"\n },\n aliases={\n \"my_search_indices\": {}\n },\n)" }, { "language": "JavaScript", - "code": "const response = await client.indices.clone({\n index: \"my_source_index\",\n target: \"my_target_index\",\n settings: {\n \"index.number_of_shards\": 5,\n },\n aliases: {\n my_search_indices: {},\n },\n});" + "code": "const response = await client.indices.clone({\n index: \"my_source_index\",\n target: \"my_target_index\",\n settings: {\n \"index.refresh_interval\": \"2s\",\n },\n aliases: {\n my_search_indices: {},\n },\n});" }, { "language": "Ruby", - "code": "response = client.indices.clone(\n index: \"my_source_index\",\n target: \"my_target_index\",\n body: {\n \"settings\": {\n \"index.number_of_shards\": 5\n },\n \"aliases\": {\n \"my_search_indices\": {}\n }\n }\n)" + "code": "response = client.indices.clone(\n index: \"my_source_index\",\n target: \"my_target_index\",\n body: {\n \"settings\": {\n \"index.refresh_interval\": \"2s\"\n },\n \"aliases\": {\n \"my_search_indices\": {}\n }\n }\n)" }, { "language": "PHP", - "code": "$resp = $client->indices()->clone([\n \"index\" => \"my_source_index\",\n \"target\" => \"my_target_index\",\n \"body\" => [\n \"settings\" => [\n \"index.number_of_shards\" => 5,\n ],\n \"aliases\" => [\n \"my_search_indices\" => new ArrayObject([]),\n ],\n ],\n]);" + "code": "$resp = $client->indices()->clone([\n \"index\" => \"my_source_index\",\n \"target\" => \"my_target_index\",\n \"body\" => [\n \"settings\" => [\n \"index.refresh_interval\" => \"2s\",\n ],\n \"aliases\" => [\n \"my_search_indices\" => new ArrayObject([]),\n ],\n ],\n]);" }, { "language": "curl", - "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"settings\":{\"index.number_of_shards\":5},\"aliases\":{\"my_search_indices\":{}}}' 
\"$ELASTICSEARCH_URL/my_source_index/_clone/my_target_index\"" + "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"settings\":{\"index.refresh_interval\":\"2s\"},\"aliases\":{\"my_search_indices\":{}}}' \"$ELASTICSEARCH_URL/my_source_index/_clone/my_target_index\"" }, { "language": "Java", @@ -9700,23 +9700,23 @@ "specification/simulate/ingest/examples/request/SimulateIngestRequestExample2.yaml": [ { "language": "Python", - "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_index\": \"my-index\",\n \"_id\": 123,\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": 456,\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n pipeline_substitutions={\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n },\n)" + "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_index\": \"my-index\",\n \"_id\": \"123\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": \"456\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n pipeline_substitutions={\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n },\n)" }, { "language": "JavaScript", - "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _index: \"my-index\",\n _id: 123,\n _source: {\n foo: \"bar\",\n },\n },\n {\n _index: \"my-index\",\n _id: 456,\n _source: {\n foo: \"rab\",\n },\n },\n ],\n pipeline_substitutions: {\n \"my-pipeline\": {\n processors: [\n {\n uppercase: {\n field: \"foo\",\n },\n },\n ],\n },\n },\n});" + "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _index: \"my-index\",\n _id: \"123\",\n _source: {\n foo: \"bar\",\n },\n },\n {\n _index: \"my-index\",\n _id: \"456\",\n _source: {\n foo: \"rab\",\n },\n },\n ],\n pipeline_substitutions: {\n \"my-pipeline\": {\n processors: [\n {\n uppercase: {\n field: \"foo\",\n },\n },\n ],\n },\n },\n});" }, { "language": "Ruby", - "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_index\": \"my-index\",\n \"_id\": 123,\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": 456,\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n \"pipeline_substitutions\": {\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n }\n }\n)" + "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_index\": \"my-index\",\n \"_id\": \"123\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": \"456\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n \"pipeline_substitutions\": {\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n }\n }\n)" }, { "language": "PHP", - "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_index\" => \"my-index\",\n \"_id\" => 123,\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n \"_index\" => \"my-index\",\n \"_id\" => 456,\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n \"pipeline_substitutions\" => [\n \"my-pipeline\" => [\n \"processors\" => array(\n [\n \"uppercase\" => [\n \"field\" => \"foo\",\n ],\n ],\n ),\n ],\n ],\n ],\n]);" + "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_index\" => \"my-index\",\n \"_id\" => \"123\",\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n 
\"_index\" => \"my-index\",\n \"_id\" => \"456\",\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n \"pipeline_substitutions\" => [\n \"my-pipeline\" => [\n \"processors\" => array(\n [\n \"uppercase\" => [\n \"field\" => \"foo\",\n ],\n ],\n ),\n ],\n ],\n ],\n]);" }, { "language": "curl", - "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_index\":\"my-index\",\"_id\":123,\"_source\":{\"foo\":\"bar\"}},{\"_index\":\"my-index\",\"_id\":456,\"_source\":{\"foo\":\"rab\"}}],\"pipeline_substitutions\":{\"my-pipeline\":{\"processors\":[{\"uppercase\":{\"field\":\"foo\"}}]}}}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"" + "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_index\":\"my-index\",\"_id\":\"123\",\"_source\":{\"foo\":\"bar\"}},{\"_index\":\"my-index\",\"_id\":\"456\",\"_source\":{\"foo\":\"rab\"}}],\"pipeline_substitutions\":{\"my-pipeline\":{\"processors\":[{\"uppercase\":{\"field\":\"foo\"}}]}}}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"" }, { "language": "Java", @@ -9778,23 +9778,23 @@ "specification/simulate/ingest/examples/request/SimulateIngestRequestExample1.yaml": [ { "language": "Python", - "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_id\": 123,\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": 456,\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n)" + "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_id\": \"123\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": \"456\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n)" }, { "language": "JavaScript", - "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _id: 123,\n _index: \"my-index\",\n _source: {\n foo: \"bar\",\n },\n },\n {\n _id: 456,\n _index: \"my-index\",\n _source: {\n foo: \"rab\",\n },\n },\n ],\n});" + "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _id: \"123\",\n _index: \"my-index\",\n _source: {\n foo: \"bar\",\n },\n },\n {\n _id: \"456\",\n _index: \"my-index\",\n _source: {\n foo: \"rab\",\n },\n },\n ],\n});" }, { "language": "Ruby", - "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_id\": 123,\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": 456,\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ]\n }\n)" + "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_id\": \"123\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": \"456\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ]\n }\n)" }, { "language": "PHP", - "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_id\" => 123,\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n \"_id\" => 456,\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n ],\n]);" + "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_id\" => \"123\",\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n \"_id\" => \"456\",\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n ],\n]);" }, { "language": "curl", - "code": "curl -X POST 
-H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_id\":123,\"_index\":\"my-index\",\"_source\":{\"foo\":\"bar\"}},{\"_id\":456,\"_index\":\"my-index\",\"_source\":{\"foo\":\"rab\"}}]}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"" + "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_id\":\"123\",\"_index\":\"my-index\",\"_source\":{\"foo\":\"bar\"}},{\"_id\":\"456\",\"_index\":\"my-index\",\"_source\":{\"foo\":\"rab\"}}]}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"" }, { "language": "Java", @@ -16936,5 +16936,291 @@ "language": "curl", "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"cohere\",\"service_settings\":{\"api_key\":\"Cohere-API-key\",\"model_id\":\"command-a-03-2025\"}}' \"$ELASTICSEARCH_URL/_inference/completion/cohere-completion\"" } + ], + "specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample1.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"amazon_sagemaker_embeddings\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\",\n \"dimensions\": 384,\n \"element_type\": \"float\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"amazon_sagemaker_embeddings\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n dimensions: 384,\n element_type: \"float\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"amazon_sagemaker_embeddings\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\",\n \"dimensions\": 384,\n \"element_type\": \"float\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"amazon_sagemaker_embeddings\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n \"dimensions\" => 384,\n \"element_type\" => \"float\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\",\"dimensions\":384,\"element_type\":\"float\"}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/amazon_sagemaker_embeddings\"" + } + ], + "specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample2.yaml": [ + { + 
"language": "Python", + "code": "resp = client.inference.put(\n task_type=\"completion\",\n inference_id=\"amazon_sagemaker_completion\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"completion\",\n inference_id: \"amazon_sagemaker_completion\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"completion\",\n inference_id: \"amazon_sagemaker_completion\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"completion\",\n \"inference_id\" => \"amazon_sagemaker_completion\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/completion/amazon_sagemaker_completion\"" + } + ], + "specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample3.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"chat_completion\",\n inference_id=\"amazon_sagemaker_chat_completion\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"chat_completion\",\n inference_id: \"amazon_sagemaker_chat_completion\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"chat_completion\",\n inference_id: \"amazon_sagemaker_chat_completion\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => 
\"chat_completion\",\n \"inference_id\" => \"amazon_sagemaker_chat_completion\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/chat_completion/amazon_sagemaker_chat_completion\"" + } + ], + "specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample4.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"sparse_embedding\",\n inference_id=\"amazon_sagemaker_sparse_embedding\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"sparse_embedding\",\n inference_id: \"amazon_sagemaker_sparse_embedding\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"sparse_embedding\",\n inference_id: \"amazon_sagemaker_sparse_embedding\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"sparse_embedding\",\n \"inference_id\" => \"amazon_sagemaker_sparse_embedding\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/sparse_embedding/amazon_sagemaker_sparse_embedding\"" + } + ], + "specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample5.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"amazon_sagemaker_rerank\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": 
\"my-endpoint\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"amazon_sagemaker_rerank\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"amazon_sagemaker_rerank\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"amazon_sagemaker_rerank\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/rerank/amazon_sagemaker_rerank\"" + } + ], + "specification/inference/put_ai21/examples/request/PutAi21RequestExample1.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"completion\",\n inference_id=\"ai21-completion\",\n inference_config={\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-large\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"completion\",\n inference_id: \"ai21-completion\",\n inference_config: {\n service: \"ai21\",\n service_settings: {\n api_key: \"ai21-api-key\",\n model_id: \"jamba-large\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"completion\",\n inference_id: \"ai21-completion\",\n body: {\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-large\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"completion\",\n \"inference_id\" => \"ai21-completion\",\n \"body\" => [\n \"service\" => \"ai21\",\n \"service_settings\" => [\n \"api_key\" => \"ai21-api-key\",\n \"model_id\" => \"jamba-large\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"ai21\",\"service_settings\":{\"api_key\":\"ai21-api-key\",\"model_id\":\"jamba-large\"}}' \"$ELASTICSEARCH_URL/_inference/completion/ai21-completion\"" + } + ], + "specification/inference/put_ai21/examples/request/PutAi21RequestExample2.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"chat-completion\",\n inference_id=\"ai21-chat-completion\",\n inference_config={\n \"service\": \"ai21\",\n 
\"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-mini\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"chat-completion\",\n inference_id: \"ai21-chat-completion\",\n inference_config: {\n service: \"ai21\",\n service_settings: {\n api_key: \"ai21-api-key\",\n model_id: \"jamba-mini\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"chat-completion\",\n inference_id: \"ai21-chat-completion\",\n body: {\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-mini\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"chat-completion\",\n \"inference_id\" => \"ai21-chat-completion\",\n \"body\" => [\n \"service\" => \"ai21\",\n \"service_settings\" => [\n \"api_key\" => \"ai21-api-key\",\n \"model_id\" => \"jamba-mini\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"ai21\",\"service_settings\":{\"api_key\":\"ai21-api-key\",\"model_id\":\"jamba-mini\"}}' \"$ELASTICSEARCH_URL/_inference/chat-completion/ai21-chat-completion\"" + } + ], + "specification/inference/put_custom/examples/request/PutCustomRequestExample1.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"custom-embeddings\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.openai.com/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json;charset=utf-8\"\n },\n \"request\": \"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"custom-embeddings\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.openai.com/v1/embeddings\",\n headers: {\n Authorization: \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json;charset=utf-8\",\n },\n request: '{\"input\": ${input}, \"model\": \"text-embedding-3-small\"}',\n response: {\n json_parser: {\n text_embeddings: \"$.data[*].embedding[*]\",\n },\n },\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"custom-embeddings\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.openai.com/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json;charset=utf-8\"\n },\n \"request\": \"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"custom-embeddings\",\n \"body\" => [\n \"service\" => \"custom\",\n 
\"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.openai.com/v1/embeddings\",\n \"headers\" => [\n \"Authorization\" => \"Bearer ${api_key}\",\n \"Content-Type\" => \"application/json;charset=utf-8\",\n ],\n \"request\" => \"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\n \"response\" => [\n \"json_parser\" => [\n \"text_embeddings\" => \"$.data[*].embedding[*]\",\n ],\n ],\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.openai.com/v1/embeddings\",\"headers\":{\"Authorization\":\"Bearer ${api_key}\",\"Content-Type\":\"application/json;charset=utf-8\"},\"request\":\"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\"response\":{\"json_parser\":{\"text_embeddings\":\"$.data[*].embedding[*]\"}}}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/custom-embeddings\"" + } + ], + "specification/inference/put_custom/examples/request/PutCustomRequestExample3.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"custom-text-embedding\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/embed\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.embeddings.float[*]\"\n }\n },\n \"input_type\": {\n \"translation\": {\n \"ingest\": \"search_document\",\n \"search\": \"search_query\"\n },\n \"default\": \"search_document\"\n }\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.cohere.com/v2/embed\",\n headers: {\n Authorization: \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\",\n },\n request:\n '{\"texts\": ${input}, \"model\": \"embed-v4.0\", \"input_type\": ${input_type}}',\n response: {\n json_parser: {\n text_embeddings: \"$.embeddings.float[*]\",\n },\n },\n input_type: {\n translation: {\n ingest: \"search_document\",\n search: \"search_query\",\n },\n default: \"search_document\",\n },\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/embed\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.embeddings.float[*]\"\n }\n },\n \"input_type\": {\n \"translation\": {\n \"ingest\": \"search_document\",\n \"search\": \"search_query\"\n },\n \"default\": \"search_document\"\n }\n }\n 
}\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"custom-text-embedding\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.cohere.com/v2/embed\",\n \"headers\" => [\n \"Authorization\" => \"bearer ${api_key}\",\n \"Content-Type\" => \"application/json\",\n ],\n \"request\" => \"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\n \"response\" => [\n \"json_parser\" => [\n \"text_embeddings\" => \"$.embeddings.float[*]\",\n ],\n ],\n \"input_type\" => [\n \"translation\" => [\n \"ingest\" => \"search_document\",\n \"search\" => \"search_query\",\n ],\n \"default\" => \"search_document\",\n ],\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.cohere.com/v2/embed\",\"headers\":{\"Authorization\":\"bearer ${api_key}\",\"Content-Type\":\"application/json\"},\"request\":\"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\"response\":{\"json_parser\":{\"text_embeddings\":\"$.embeddings.float[*]\"}},\"input_type\":{\"translation\":{\"ingest\":\"search_document\",\"search\":\"search_query\"},\"default\":\"search_document\"}}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/custom-text-embedding\"" + } + ], + "specification/inference/put_custom/examples/request/PutCustomRequestExample2.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"custom-rerank\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/rerank\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"reranked_index\": \"$.results[*].index\",\n \"relevance_score\": \"$.results[*].relevance_score\"\n }\n }\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"custom-rerank\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.cohere.com/v2/rerank\",\n headers: {\n Authorization: \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\",\n },\n request:\n '{\"documents\": ${input}, \"query\": ${query}, \"model\": \"rerank-v3.5\"}',\n response: {\n json_parser: {\n reranked_index: \"$.results[*].index\",\n relevance_score: \"$.results[*].relevance_score\",\n },\n },\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"custom-rerank\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/rerank\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": 
\\\"rerank-v3.5\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"reranked_index\": \"$.results[*].index\",\n \"relevance_score\": \"$.results[*].relevance_score\"\n }\n }\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"custom-rerank\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.cohere.com/v2/rerank\",\n \"headers\" => [\n \"Authorization\" => \"bearer ${api_key}\",\n \"Content-Type\" => \"application/json\",\n ],\n \"request\" => \"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\n \"response\" => [\n \"json_parser\" => [\n \"reranked_index\" => \"$.results[*].index\",\n \"relevance_score\" => \"$.results[*].relevance_score\",\n ],\n ],\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.cohere.com/v2/rerank\",\"headers\":{\"Authorization\":\"bearer ${api_key}\",\"Content-Type\":\"application/json\"},\"request\":\"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\"response\":{\"json_parser\":{\"reranked_index\":\"$.results[*].index\",\"relevance_score\":\"$.results[*].relevance_score\"}}}}' \"$ELASTICSEARCH_URL/_inference/rerank/custom-rerank\"" + } + ], + "specification/inference/put_custom/examples/request/PutCustomRequestExample5.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"custom-text-embedding-hf\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"input\\\": ${input}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding-hf\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"/v1/embeddings\",\n headers: {\n Authorization: \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json\",\n },\n request: '{\"input\": ${input}}',\n response: {\n json_parser: {\n text_embeddings: \"$.data[*].embedding[*]\",\n },\n },\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding-hf\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"input\\\": ${input}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"custom-text-embedding-hf\",\n \"body\" => [\n \"service\" 
=> \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"/v1/embeddings\",\n \"headers\" => [\n \"Authorization\" => \"Bearer ${api_key}\",\n \"Content-Type\" => \"application/json\",\n ],\n \"request\" => \"{\\\"input\\\": ${input}}\",\n \"response\" => [\n \"json_parser\" => [\n \"text_embeddings\" => \"$.data[*].embedding[*]\",\n ],\n ],\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"/v1/embeddings\",\"headers\":{\"Authorization\":\"Bearer ${api_key}\",\"Content-Type\":\"application/json\"},\"request\":\"{\\\"input\\\": ${input}}\",\"response\":{\"json_parser\":{\"text_embeddings\":\"$.data[*].embedding[*]\"}}}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/custom-text-embedding-hf\"" + } + ], + "specification/inference/put_custom/examples/request/PutCustomRequestExample4.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"custom-rerank-jina\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.jina.ai/v1/rerank\",\n \"headers\": {\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer ${api_key}\"\n },\n \"request\": \"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\n \"response\": {\n \"json_parser\": {\n \"relevance_score\": \"$.results[*].relevance_score\",\n \"reranked_index\": \"$.results[*].index\"\n }\n }\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"custom-rerank-jina\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.jina.ai/v1/rerank\",\n headers: {\n \"Content-Type\": \"application/json\",\n Authorization: \"Bearer ${api_key}\",\n },\n request:\n '{\"model\": \"jina-reranker-v2-base-multilingual\",\"query\": ${query},\"documents\":${input}}',\n response: {\n json_parser: {\n relevance_score: \"$.results[*].relevance_score\",\n reranked_index: \"$.results[*].index\",\n },\n },\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"custom-rerank-jina\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.jina.ai/v1/rerank\",\n \"headers\": {\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer ${api_key}\"\n },\n \"request\": \"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\n \"response\": {\n \"json_parser\": {\n \"relevance_score\": \"$.results[*].relevance_score\",\n \"reranked_index\": \"$.results[*].index\"\n }\n }\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"custom-rerank-jina\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.jina.ai/v1/rerank\",\n \"headers\" => [\n \"Content-Type\" => \"application/json\",\n 
\"Authorization\" => \"Bearer ${api_key}\",\n ],\n \"request\" => \"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\n \"response\" => [\n \"json_parser\" => [\n \"relevance_score\" => \"$.results[*].relevance_score\",\n \"reranked_index\" => \"$.results[*].index\",\n ],\n ],\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.jina.ai/v1/rerank\",\"headers\":{\"Content-Type\":\"application/json\",\"Authorization\":\"Bearer ${api_key}\"},\"request\":\"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\"response\":{\"json_parser\":{\"relevance_score\":\"$.results[*].relevance_score\",\"reranked_index\":\"$.results[*].index\"}}}}' \"$ELASTICSEARCH_URL/_inference/rerank/custom-rerank-jina\"" + } + ], + "specification/inference/put_azureaistudio/examples/request/PutAzureAiStudioRequestExample3.yaml": [ + { + "language": "Python", + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"azure_ai_studio_rerank\",\n inference_config={\n \"service\": \"azureaistudio\",\n \"service_settings\": {\n \"api_key\": \"Azure-AI-Studio-API-key\",\n \"target\": \"Target-URI\",\n \"provider\": \"cohere\",\n \"endpoint_type\": \"token\"\n }\n },\n)" + }, + { + "language": "JavaScript", + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"azure_ai_studio_rerank\",\n inference_config: {\n service: \"azureaistudio\",\n service_settings: {\n api_key: \"Azure-AI-Studio-API-key\",\n target: \"Target-URI\",\n provider: \"cohere\",\n endpoint_type: \"token\",\n },\n },\n});" + }, + { + "language": "Ruby", + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"azure_ai_studio_rerank\",\n body: {\n \"service\": \"azureaistudio\",\n \"service_settings\": {\n \"api_key\": \"Azure-AI-Studio-API-key\",\n \"target\": \"Target-URI\",\n \"provider\": \"cohere\",\n \"endpoint_type\": \"token\"\n }\n }\n)" + }, + { + "language": "PHP", + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"azure_ai_studio_rerank\",\n \"body\" => [\n \"service\" => \"azureaistudio\",\n \"service_settings\" => [\n \"api_key\" => \"Azure-AI-Studio-API-key\",\n \"target\" => \"Target-URI\",\n \"provider\" => \"cohere\",\n \"endpoint_type\" => \"token\",\n ],\n ],\n]);" + }, + { + "language": "curl", + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"azureaistudio\",\"service_settings\":{\"api_key\":\"Azure-AI-Studio-API-key\",\"target\":\"Target-URI\",\"provider\":\"cohere\",\"endpoint_type\":\"token\"}}' \"$ELASTICSEARCH_URL/_inference/rerank/azure_ai_studio_rerank\"" + } ] } \ No newline at end of file diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json index 09e955801a..63f700a14e 100644 --- a/output/openapi/elasticsearch-openapi.json +++ b/output/openapi/elasticsearch-openapi.json @@ -44626,7 +44626,7 @@ } } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -44667,7 +44667,7 @@ "$ref": "#/components/responses/snapshot.create-200" } }, - "x-state": 
"Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -44708,7 +44708,7 @@ "$ref": "#/components/responses/snapshot.create-200" } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -44818,7 +44818,7 @@ "$ref": "#/components/responses/snapshot.get_repository-200" } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -44859,7 +44859,7 @@ "$ref": "#/components/responses/snapshot.create_repository-200" } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -44900,7 +44900,7 @@ "$ref": "#/components/responses/snapshot.create_repository-200" } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -44960,7 +44960,7 @@ } } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -44990,7 +44990,7 @@ "$ref": "#/components/responses/snapshot.get_repository-200" } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -45534,7 +45534,7 @@ } } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", @@ -45711,7 +45711,7 @@ } } }, - "x-state": "Generally available; Added in 0.0.0", + "x-state": "Generally available", "x-metaTags": [ { "content": "Elasticsearch", diff --git a/output/schema/schema.json b/output/schema/schema.json index 59331ae7f5..3b97ac3540 100644 --- a/output/schema/schema.json +++ b/output/schema/schema.json @@ -21799,7 +21799,6 @@ "visibility": "private" }, "stack": { - "since": "0.0.0", "stability": "stable" } }, @@ -21847,7 +21846,6 @@ "visibility": "private" }, "stack": { - "since": "0.0.0", "stability": "stable" } }, @@ -21936,7 +21934,6 @@ "visibility": "private" }, "stack": { - "since": "0.0.0", "stability": "stable" } }, @@ -21978,7 +21975,6 @@ "visibility": "private" }, "stack": { - "since": "0.0.0", "stability": "stable" } }, @@ -22020,7 +22016,6 @@ "visibility": "private" }, "stack": { - "since": "0.0.0", "stability": "stable" } }, @@ -22146,7 +22141,6 @@ "visibility": "private" }, "stack": { - "since": "0.0.0", "stability": "stable" } }, @@ -22247,7 +22241,6 @@ "visibility": "private" }, "stack": { - "since": "0.0.0", "stability": "stable" } }, @@ -151601,23 +151594,23 @@ "indicesCloneRequestExample1": { "alternatives": [ { - "code": "resp = client.indices.clone(\n index=\"my_source_index\",\n target=\"my_target_index\",\n settings={\n \"index.number_of_shards\": 5\n },\n aliases={\n \"my_search_indices\": {}\n },\n)", + "code": "resp = client.indices.clone(\n index=\"my_source_index\",\n target=\"my_target_index\",\n settings={\n \"index.refresh_interval\": \"2s\"\n },\n aliases={\n \"my_search_indices\": {}\n },\n)", "language": "Python" }, { - "code": "const response = await client.indices.clone({\n index: \"my_source_index\",\n target: \"my_target_index\",\n settings: {\n \"index.number_of_shards\": 5,\n },\n aliases: {\n my_search_indices: {},\n },\n});", + "code": "const response = await client.indices.clone({\n index: \"my_source_index\",\n target: \"my_target_index\",\n settings: {\n 
\"index.refresh_interval\": \"2s\",\n },\n aliases: {\n my_search_indices: {},\n },\n});", "language": "JavaScript" }, { - "code": "response = client.indices.clone(\n index: \"my_source_index\",\n target: \"my_target_index\",\n body: {\n \"settings\": {\n \"index.number_of_shards\": 5\n },\n \"aliases\": {\n \"my_search_indices\": {}\n }\n }\n)", + "code": "response = client.indices.clone(\n index: \"my_source_index\",\n target: \"my_target_index\",\n body: {\n \"settings\": {\n \"index.refresh_interval\": \"2s\"\n },\n \"aliases\": {\n \"my_search_indices\": {}\n }\n }\n)", "language": "Ruby" }, { - "code": "$resp = $client->indices()->clone([\n \"index\" => \"my_source_index\",\n \"target\" => \"my_target_index\",\n \"body\" => [\n \"settings\" => [\n \"index.number_of_shards\" => 5,\n ],\n \"aliases\" => [\n \"my_search_indices\" => new ArrayObject([]),\n ],\n ],\n]);", + "code": "$resp = $client->indices()->clone([\n \"index\" => \"my_source_index\",\n \"target\" => \"my_target_index\",\n \"body\" => [\n \"settings\" => [\n \"index.refresh_interval\" => \"2s\",\n ],\n \"aliases\" => [\n \"my_search_indices\" => new ArrayObject([]),\n ],\n ],\n]);", "language": "PHP" }, { - "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"settings\":{\"index.number_of_shards\":5},\"aliases\":{\"my_search_indices\":{}}}' \"$ELASTICSEARCH_URL/my_source_index/_clone/my_target_index\"", + "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"settings\":{\"index.refresh_interval\":\"2s\"},\"aliases\":{\"my_search_indices\":{}}}' \"$ELASTICSEARCH_URL/my_source_index/_clone/my_target_index\"", "language": "curl" }, { @@ -174622,11 +174615,55 @@ "description": "Create a AI21 inference endpoint.\n\nCreate an inference endpoint to perform an inference task with the `ai21` service.", "examples": { "PutAi21RequestExample1": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"completion\",\n inference_id=\"ai21-completion\",\n inference_config={\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-large\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"completion\",\n inference_id: \"ai21-completion\",\n inference_config: {\n service: \"ai21\",\n service_settings: {\n api_key: \"ai21-api-key\",\n model_id: \"jamba-large\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"completion\",\n inference_id: \"ai21-completion\",\n body: {\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-large\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"completion\",\n \"inference_id\" => \"ai21-completion\",\n \"body\" => [\n \"service\" => \"ai21\",\n \"service_settings\" => [\n \"api_key\" => \"ai21-api-key\",\n \"model_id\" => \"jamba-large\",\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"ai21\",\"service_settings\":{\"api_key\":\"ai21-api-key\",\"model_id\":\"jamba-large\"}}' \"$ELASTICSEARCH_URL/_inference/completion/ai21-completion\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/completion/ai21-completion` to create an AI21 
inference endpoint that performs a `completion` task.", "method_request": "PUT _inference/completion/ai21-completion", "value": "{\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-large\" \n }\n}" }, "PutAi21RequestExample2": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"chat-completion\",\n inference_id=\"ai21-chat-completion\",\n inference_config={\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-mini\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"chat-completion\",\n inference_id: \"ai21-chat-completion\",\n inference_config: {\n service: \"ai21\",\n service_settings: {\n api_key: \"ai21-api-key\",\n model_id: \"jamba-mini\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"chat-completion\",\n inference_id: \"ai21-chat-completion\",\n body: {\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-mini\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"chat-completion\",\n \"inference_id\" => \"ai21-chat-completion\",\n \"body\" => [\n \"service\" => \"ai21\",\n \"service_settings\" => [\n \"api_key\" => \"ai21-api-key\",\n \"model_id\" => \"jamba-mini\",\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"ai21\",\"service_settings\":{\"api_key\":\"ai21-api-key\",\"model_id\":\"jamba-mini\"}}' \"$ELASTICSEARCH_URL/_inference/chat-completion/ai21-chat-completion\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/chat-completion/ai21-chat-completion` to create a AI21 inference endpoint that performs a `chat_completion` task.", "method_request": "PUT _inference/chat-completion/ai21-chat-completion", "value": "{\n \"service\": \"ai21\",\n \"service_settings\": {\n \"api_key\": \"ai21-api-key\",\n \"model_id\": \"jamba-mini\" \n }\n}" @@ -175229,30 +175266,140 @@ "description": "Create an Amazon SageMaker inference endpoint.\n\nCreate an inference endpoint to perform an inference task with the `amazon_sagemaker` service.", "examples": { "PutAmazonSageMakerRequestExample1": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"amazon_sagemaker_embeddings\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\",\n \"dimensions\": 384,\n \"element_type\": \"float\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"amazon_sagemaker_embeddings\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n dimensions: 384,\n element_type: \"float\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"amazon_sagemaker_embeddings\",\n body: {\n \"service\": 
\"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\",\n \"dimensions\": 384,\n \"element_type\": \"float\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"amazon_sagemaker_embeddings\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n \"dimensions\" => 384,\n \"element_type\" => \"float\",\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\",\"dimensions\":384,\"element_type\":\"float\"}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/amazon_sagemaker_embeddings\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/text_embedding/amazon_sagemaker_embeddings` to create an inference endpoint that performs a text embedding task.", "method_request": "PUT _inference/text_embedding/amazon_sagemaker_embeddings", "summary": "A text embedding task", "value": "{\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\",\n \"dimensions\": 384,\n \"element_type\": \"float\"\n }\n}" }, "PutAmazonSageMakerRequestExample2": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"completion\",\n inference_id=\"amazon_sagemaker_completion\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"completion\",\n inference_id: \"amazon_sagemaker_completion\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"completion\",\n inference_id: \"amazon_sagemaker_completion\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"completion\",\n \"inference_id\" => \"amazon_sagemaker_completion\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n 
],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/completion/amazon_sagemaker_completion\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/completion/amazon_sagemaker_completion` to create an inference endpoint that performs a completion task.", "method_request": "PUT _inference/completion/amazon_sagemaker_completion", "summary": "A completion task", "value": "{\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n}" }, "PutAmazonSageMakerRequestExample3": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"chat_completion\",\n inference_id=\"amazon_sagemaker_chat_completion\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"chat_completion\",\n inference_id: \"amazon_sagemaker_chat_completion\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"chat_completion\",\n inference_id: \"amazon_sagemaker_chat_completion\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"chat_completion\",\n \"inference_id\" => \"amazon_sagemaker_chat_completion\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/chat_completion/amazon_sagemaker_chat_completion\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/chat_completion/amazon_sagemaker_chat_completion` to create an inference endpoint that performs a chat completion task.", "method_request": "PUT _inference/chat_completion/amazon_sagemaker_chat_completion", "summary": "A chat completion task", "value": "{\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": 
\"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n}" }, "PutAmazonSageMakerRequestExample4": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"sparse_embedding\",\n inference_id=\"amazon_sagemaker_sparse_embedding\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"sparse_embedding\",\n inference_id: \"amazon_sagemaker_sparse_embedding\",\n inference_config: {\n service: \"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"sparse_embedding\",\n inference_id: \"amazon_sagemaker_sparse_embedding\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"sparse_embedding\",\n \"inference_id\" => \"amazon_sagemaker_sparse_embedding\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/sparse_embedding/amazon_sagemaker_sparse_embedding\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/sparse_embedding/amazon_sagemaker_sparse_embedding` to create an inference endpoint that performs a sparse embedding task.", "method_request": "PUT _inference/sparse_embedding/amazon_sagemaker_sparse_embedding", "summary": "A sparse embedding task", "value": "{\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n}" }, "PutAmazonSageMakerRequestExample5": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"amazon_sagemaker_rerank\",\n inference_config={\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"amazon_sagemaker_rerank\",\n inference_config: {\n service: 
\"amazon_sagemaker\",\n service_settings: {\n access_key: \"AWS-access-key\",\n secret_key: \"AWS-secret-key\",\n region: \"us-east-1\",\n api: \"elastic\",\n endpoint_name: \"my-endpoint\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"amazon_sagemaker_rerank\",\n body: {\n \"service\": \"amazon_sagemaker\",\n \"service_settings\": {\n \"access_key\": \"AWS-access-key\",\n \"secret_key\": \"AWS-secret-key\",\n \"region\": \"us-east-1\",\n \"api\": \"elastic\",\n \"endpoint_name\": \"my-endpoint\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"amazon_sagemaker_rerank\",\n \"body\" => [\n \"service\" => \"amazon_sagemaker\",\n \"service_settings\" => [\n \"access_key\" => \"AWS-access-key\",\n \"secret_key\" => \"AWS-secret-key\",\n \"region\" => \"us-east-1\",\n \"api\" => \"elastic\",\n \"endpoint_name\" => \"my-endpoint\",\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"amazon_sagemaker\",\"service_settings\":{\"access_key\":\"AWS-access-key\",\"secret_key\":\"AWS-secret-key\",\"region\":\"us-east-1\",\"api\":\"elastic\",\"endpoint_name\":\"my-endpoint\"}}' \"$ELASTICSEARCH_URL/_inference/rerank/amazon_sagemaker_rerank\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/rerank/amazon_sagemaker_rerank` to create an inference endpoint that performs a rerank task.", "method_request": "PUT _inference/rerank/amazon_sagemaker_rerank", "summary": "A rerank task", @@ -175624,6 +175771,28 @@ "value": "{\n \"service\": \"azureaistudio\",\n \"service_settings\": {\n \"api_key\": \"Azure-AI-Studio-API-key\",\n \"target\": \"Target-URI\",\n \"provider\": \"databricks\",\n \"endpoint_type\": \"realtime\"\n }\n}" }, "PutAzureAiStudioRequestExample3": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"azure_ai_studio_rerank\",\n inference_config={\n \"service\": \"azureaistudio\",\n \"service_settings\": {\n \"api_key\": \"Azure-AI-Studio-API-key\",\n \"target\": \"Target-URI\",\n \"provider\": \"cohere\",\n \"endpoint_type\": \"token\"\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"azure_ai_studio_rerank\",\n inference_config: {\n service: \"azureaistudio\",\n service_settings: {\n api_key: \"Azure-AI-Studio-API-key\",\n target: \"Target-URI\",\n provider: \"cohere\",\n endpoint_type: \"token\",\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"azure_ai_studio_rerank\",\n body: {\n \"service\": \"azureaistudio\",\n \"service_settings\": {\n \"api_key\": \"Azure-AI-Studio-API-key\",\n \"target\": \"Target-URI\",\n \"provider\": \"cohere\",\n \"endpoint_type\": \"token\"\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"azure_ai_studio_rerank\",\n \"body\" => [\n \"service\" => \"azureaistudio\",\n \"service_settings\" => [\n \"api_key\" => \"Azure-AI-Studio-API-key\",\n \"target\" => \"Target-URI\",\n \"provider\" => \"cohere\",\n \"endpoint_type\" => \"token\",\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H 
\"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"azureaistudio\",\"service_settings\":{\"api_key\":\"Azure-AI-Studio-API-key\",\"target\":\"Target-URI\",\"provider\":\"cohere\",\"endpoint_type\":\"token\"}}' \"$ELASTICSEARCH_URL/_inference/rerank/azure_ai_studio_rerank\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/rerank/azure_ai_studio_rerank` to create an inference endpoint that performs a rerank task.", "method_request": "PUT _inference/rerank/azure_ai_studio_rerank", "summary": "A rerank task", @@ -176191,30 +176360,140 @@ "description": "Create a custom inference endpoint.\n\nThe custom service gives more control over how to interact with external inference services that aren't explicitly supported through dedicated integrations.\nThe custom service gives you the ability to define the headers, url, query parameters, request body, and secrets.\nThe custom service supports the template replacement functionality, which enables you to define a template that can be replaced with the value associated with that key.\nTemplates are portions of a string that start with `${` and end with `}`.\nThe parameters `secret_parameters` and `task_settings` are checked for keys for template replacement. Template replacement is supported in the `request`, `headers`, `url`, and `query_parameters`.\nIf the definition (key) is not found for a template, an error message is returned.\nIn case of an endpoint definition like the following:\n```\nPUT _inference/text_embedding/test-text-embedding\n{\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"...endpoints.huggingface.cloud/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"input\\\": ${input}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\":\"$.data[*].embedding[*]\"\n }\n }\n }\n}\n```\nTo replace `${api_key}` the `secret_parameters` and `task_settings` are checked for a key named `api_key`.\n\n> info\n> Templates should not be surrounded by quotes.\n\nPre-defined templates:\n* `${input}` refers to the array of input strings that comes from the `input` field of the subsequent inference requests.\n* `${input_type}` refers to the input type translation values.\n* `${query}` refers to the query field used specifically for reranking tasks.\n* `${top_n}` refers to the `top_n` field available when performing rerank requests.\n* `${return_documents}` refers to the `return_documents` field available when performing rerank requests.", "examples": { "PutCustomRequestExample1": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"custom-embeddings\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.openai.com/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json;charset=utf-8\"\n },\n \"request\": \"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"custom-embeddings\",\n inference_config: {\n service: \"custom\",\n 
service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.openai.com/v1/embeddings\",\n headers: {\n Authorization: \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json;charset=utf-8\",\n },\n request: '{\"input\": ${input}, \"model\": \"text-embedding-3-small\"}',\n response: {\n json_parser: {\n text_embeddings: \"$.data[*].embedding[*]\",\n },\n },\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"custom-embeddings\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.openai.com/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json;charset=utf-8\"\n },\n \"request\": \"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"custom-embeddings\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.openai.com/v1/embeddings\",\n \"headers\" => [\n \"Authorization\" => \"Bearer ${api_key}\",\n \"Content-Type\" => \"application/json;charset=utf-8\",\n ],\n \"request\" => \"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\n \"response\" => [\n \"json_parser\" => [\n \"text_embeddings\" => \"$.data[*].embedding[*]\",\n ],\n ],\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.openai.com/v1/embeddings\",\"headers\":{\"Authorization\":\"Bearer ${api_key}\",\"Content-Type\":\"application/json;charset=utf-8\"},\"request\":\"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\"response\":{\"json_parser\":{\"text_embeddings\":\"$.data[*].embedding[*]\"}}}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/custom-embeddings\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/text_embedding/custom-embeddings` to create an inference endpoint that performs a text embedding task.", "method_request": "PUT _inference/text_embedding/custom-embeddings", "summary": "Custom text embedding task (OpenAI)", "value": "{\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.openai.com/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json;charset=utf-8\"\n },\n \"request\": \"{\\\"input\\\": ${input}, \\\"model\\\": \\\"text-embedding-3-small\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n}" }, "PutCustomRequestExample2": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"custom-rerank\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/rerank\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n 
\"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"reranked_index\": \"$.results[*].index\",\n \"relevance_score\": \"$.results[*].relevance_score\"\n }\n }\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"custom-rerank\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.cohere.com/v2/rerank\",\n headers: {\n Authorization: \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\",\n },\n request:\n '{\"documents\": ${input}, \"query\": ${query}, \"model\": \"rerank-v3.5\"}',\n response: {\n json_parser: {\n reranked_index: \"$.results[*].index\",\n relevance_score: \"$.results[*].relevance_score\",\n },\n },\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"custom-rerank\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/rerank\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"reranked_index\": \"$.results[*].index\",\n \"relevance_score\": \"$.results[*].relevance_score\"\n }\n }\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"custom-rerank\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.cohere.com/v2/rerank\",\n \"headers\" => [\n \"Authorization\" => \"bearer ${api_key}\",\n \"Content-Type\" => \"application/json\",\n ],\n \"request\" => \"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\n \"response\" => [\n \"json_parser\" => [\n \"reranked_index\" => \"$.results[*].index\",\n \"relevance_score\" => \"$.results[*].relevance_score\",\n ],\n ],\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.cohere.com/v2/rerank\",\"headers\":{\"Authorization\":\"bearer ${api_key}\",\"Content-Type\":\"application/json\"},\"request\":\"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\"response\":{\"json_parser\":{\"reranked_index\":\"$.results[*].index\",\"relevance_score\":\"$.results[*].relevance_score\"}}}}' \"$ELASTICSEARCH_URL/_inference/rerank/custom-rerank\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/rerank/custom-rerank` to create an inference endpoint that performs a rerank task.", "method_request": "PUT _inference/rerank/custom-rerank", "summary": "Custom rerank task (Cohere APIv2)", "value": "{\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/rerank\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": 
\"application/json\"\n },\n \"request\": \"{\\\"documents\\\": ${input}, \\\"query\\\": ${query}, \\\"model\\\": \\\"rerank-v3.5\\\"}\",\n \"response\": {\n \"json_parser\": {\n \"reranked_index\":\"$.results[*].index\",\n \"relevance_score\":\"$.results[*].relevance_score\"\n }\n }\n }\n}" }, "PutCustomRequestExample3": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"custom-text-embedding\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/embed\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.embeddings.float[*]\"\n }\n },\n \"input_type\": {\n \"translation\": {\n \"ingest\": \"search_document\",\n \"search\": \"search_query\"\n },\n \"default\": \"search_document\"\n }\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.cohere.com/v2/embed\",\n headers: {\n Authorization: \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\",\n },\n request:\n '{\"texts\": ${input}, \"model\": \"embed-v4.0\", \"input_type\": ${input_type}}',\n response: {\n json_parser: {\n text_embeddings: \"$.embeddings.float[*]\",\n },\n },\n input_type: {\n translation: {\n ingest: \"search_document\",\n search: \"search_query\",\n },\n default: \"search_document\",\n },\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/embed\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.embeddings.float[*]\"\n }\n },\n \"input_type\": {\n \"translation\": {\n \"ingest\": \"search_document\",\n \"search\": \"search_query\"\n },\n \"default\": \"search_document\"\n }\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"custom-text-embedding\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.cohere.com/v2/embed\",\n \"headers\" => [\n \"Authorization\" => \"bearer ${api_key}\",\n \"Content-Type\" => \"application/json\",\n ],\n \"request\" => \"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\n \"response\" => [\n \"json_parser\" => [\n \"text_embeddings\" => \"$.embeddings.float[*]\",\n ],\n ],\n \"input_type\" => [\n \"translation\" => [\n \"ingest\" => \"search_document\",\n \"search\" => \"search_query\",\n ],\n \"default\" => \"search_document\",\n ],\n ],\n ],\n]);", + 
"language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.cohere.com/v2/embed\",\"headers\":{\"Authorization\":\"bearer ${api_key}\",\"Content-Type\":\"application/json\"},\"request\":\"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\"response\":{\"json_parser\":{\"text_embeddings\":\"$.embeddings.float[*]\"}},\"input_type\":{\"translation\":{\"ingest\":\"search_document\",\"search\":\"search_query\"},\"default\":\"search_document\"}}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/custom-text-embedding\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/text_embedding/custom-text-embedding` to create an inference endpoint that performs a text embedding task.", "method_request": "PUT _inference/text_embedding/custom-text-embedding", "summary": "Custom text embedding task (Cohere APIv2)", "value": "{\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.cohere.com/v2/embed\",\n \"headers\": {\n \"Authorization\": \"bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"texts\\\": ${input}, \\\"model\\\": \\\"embed-v4.0\\\", \\\"input_type\\\": ${input_type}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\":\"$.embeddings.float[*]\"\n }\n },\n \"input_type\": {\n \"translation\": {\n \"ingest\": \"search_document\",\n \"search\": \"search_query\"\n },\n \"default\": \"search_document\"\n }\n }\n}" }, "PutCustomRequestExample4": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"rerank\",\n inference_id=\"custom-rerank-jina\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.jina.ai/v1/rerank\",\n \"headers\": {\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer ${api_key}\"\n },\n \"request\": \"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\n \"response\": {\n \"json_parser\": {\n \"relevance_score\": \"$.results[*].relevance_score\",\n \"reranked_index\": \"$.results[*].index\"\n }\n }\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"rerank\",\n inference_id: \"custom-rerank-jina\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"https://api.jina.ai/v1/rerank\",\n headers: {\n \"Content-Type\": \"application/json\",\n Authorization: \"Bearer ${api_key}\",\n },\n request:\n '{\"model\": \"jina-reranker-v2-base-multilingual\",\"query\": ${query},\"documents\":${input}}',\n response: {\n json_parser: {\n relevance_score: \"$.results[*].relevance_score\",\n reranked_index: \"$.results[*].index\",\n },\n },\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"rerank\",\n inference_id: \"custom-rerank-jina\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"https://api.jina.ai/v1/rerank\",\n \"headers\": {\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer ${api_key}\"\n },\n \"request\": \"{\\\"model\\\": 
\\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\n \"response\": {\n \"json_parser\": {\n \"relevance_score\": \"$.results[*].relevance_score\",\n \"reranked_index\": \"$.results[*].index\"\n }\n }\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"rerank\",\n \"inference_id\" => \"custom-rerank-jina\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"https://api.jina.ai/v1/rerank\",\n \"headers\" => [\n \"Content-Type\" => \"application/json\",\n \"Authorization\" => \"Bearer ${api_key}\",\n ],\n \"request\" => \"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\n \"response\" => [\n \"json_parser\" => [\n \"relevance_score\" => \"$.results[*].relevance_score\",\n \"reranked_index\" => \"$.results[*].index\",\n ],\n ],\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"https://api.jina.ai/v1/rerank\",\"headers\":{\"Content-Type\":\"application/json\",\"Authorization\":\"Bearer ${api_key}\"},\"request\":\"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\"response\":{\"json_parser\":{\"relevance_score\":\"$.results[*].relevance_score\",\"reranked_index\":\"$.results[*].index\"}}}}' \"$ELASTICSEARCH_URL/_inference/rerank/custom-rerank-jina\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/rerank/custom-rerank-jina` to create an inference endpoint that performs a rerank task.", "method_request": "PUT _inference/rerank/custom-rerank-jina", "summary": "Custom rerank task (Jina AI)", "value": "{\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n }, \n \"url\": \"https://api.jina.ai/v1/rerank\",\n \"headers\": {\n \"Content-Type\": \"application/json\",\n \"Authorization\": \"Bearer ${api_key}\"\n },\n \"request\": \"{\\\"model\\\": \\\"jina-reranker-v2-base-multilingual\\\",\\\"query\\\": ${query},\\\"documents\\\":${input}}\",\n \"response\": {\n \"json_parser\": {\n \"relevance_score\": \"$.results[*].relevance_score\",\n \"reranked_index\": \"$.results[*].index\"\n }\n }\n }\n}" }, "PutCustomRequestExample5": { + "alternatives": [ + { + "code": "resp = client.inference.put(\n task_type=\"text_embedding\",\n inference_id=\"custom-text-embedding-hf\",\n inference_config={\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"input\\\": ${input}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n },\n)", + "language": "Python" + }, + { + "code": "const response = await client.inference.put({\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding-hf\",\n inference_config: {\n service: \"custom\",\n service_settings: {\n secret_parameters: {\n api_key: \"\",\n },\n url: \"/v1/embeddings\",\n headers: {\n Authorization: \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json\",\n },\n request: '{\"input\": 
${input}}',\n response: {\n json_parser: {\n text_embeddings: \"$.data[*].embedding[*]\",\n },\n },\n },\n },\n});", + "language": "JavaScript" + }, + { + "code": "response = client.inference.put(\n task_type: \"text_embedding\",\n inference_id: \"custom-text-embedding-hf\",\n body: {\n \"service\": \"custom\",\n \"service_settings\": {\n \"secret_parameters\": {\n \"api_key\": \"\"\n },\n \"url\": \"/v1/embeddings\",\n \"headers\": {\n \"Authorization\": \"Bearer ${api_key}\",\n \"Content-Type\": \"application/json\"\n },\n \"request\": \"{\\\"input\\\": ${input}}\",\n \"response\": {\n \"json_parser\": {\n \"text_embeddings\": \"$.data[*].embedding[*]\"\n }\n }\n }\n }\n)", + "language": "Ruby" + }, + { + "code": "$resp = $client->inference()->put([\n \"task_type\" => \"text_embedding\",\n \"inference_id\" => \"custom-text-embedding-hf\",\n \"body\" => [\n \"service\" => \"custom\",\n \"service_settings\" => [\n \"secret_parameters\" => [\n \"api_key\" => \"\",\n ],\n \"url\" => \"/v1/embeddings\",\n \"headers\" => [\n \"Authorization\" => \"Bearer ${api_key}\",\n \"Content-Type\" => \"application/json\",\n ],\n \"request\" => \"{\\\"input\\\": ${input}}\",\n \"response\" => [\n \"json_parser\" => [\n \"text_embeddings\" => \"$.data[*].embedding[*]\",\n ],\n ],\n ],\n ],\n]);", + "language": "PHP" + }, + { + "code": "curl -X PUT -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"service\":\"custom\",\"service_settings\":{\"secret_parameters\":{\"api_key\":\"\"},\"url\":\"/v1/embeddings\",\"headers\":{\"Authorization\":\"Bearer ${api_key}\",\"Content-Type\":\"application/json\"},\"request\":\"{\\\"input\\\": ${input}}\",\"response\":{\"json_parser\":{\"text_embeddings\":\"$.data[*].embedding[*]\"}}}}' \"$ELASTICSEARCH_URL/_inference/text_embedding/custom-text-embedding-hf\"", + "language": "curl" + } + ], "description": "Run `PUT _inference/text_embedding/custom-text-embedding-hf` to create an inference endpoint that performs a text embedding task by using the Qwen/Qwen3-Embedding-8B model.", "method_request": "PUT _inference/text_embedding/custom-text-embedding-hf", "summary": "Custom text embedding task (Hugging Face)", @@ -248857,23 +249136,23 @@ "SimulateIngestRequestExample1": { "alternatives": [ { - "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_id\": 123,\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": 456,\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n)", + "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_id\": \"123\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": \"456\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n)", "language": "Python" }, { - "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _id: 123,\n _index: \"my-index\",\n _source: {\n foo: \"bar\",\n },\n },\n {\n _id: 456,\n _index: \"my-index\",\n _source: {\n foo: \"rab\",\n },\n },\n ],\n});", + "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _id: \"123\",\n _index: \"my-index\",\n _source: {\n foo: \"bar\",\n },\n },\n {\n _id: \"456\",\n _index: \"my-index\",\n _source: {\n foo: \"rab\",\n },\n },\n ],\n});", "language": "JavaScript" }, { - "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_id\": 123,\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": 456,\n \"_index\": \"my-index\",\n \"_source\": 
{\n \"foo\": \"rab\"\n }\n }\n ]\n }\n)", + "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_id\": \"123\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_id\": \"456\",\n \"_index\": \"my-index\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ]\n }\n)", "language": "Ruby" }, { - "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_id\" => 123,\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n \"_id\" => 456,\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n ],\n]);", + "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_id\" => \"123\",\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n \"_id\" => \"456\",\n \"_index\" => \"my-index\",\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n ],\n]);", "language": "PHP" }, { - "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_id\":123,\"_index\":\"my-index\",\"_source\":{\"foo\":\"bar\"}},{\"_id\":456,\"_index\":\"my-index\",\"_source\":{\"foo\":\"rab\"}}]}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"", + "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_id\":\"123\",\"_index\":\"my-index\",\"_source\":{\"foo\":\"bar\"}},{\"_id\":\"456\",\"_index\":\"my-index\",\"_source\":{\"foo\":\"rab\"}}]}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"", "language": "curl" }, { @@ -248889,23 +249168,23 @@ "SimulateIngestRequestExample2": { "alternatives": [ { - "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_index\": \"my-index\",\n \"_id\": 123,\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": 456,\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n pipeline_substitutions={\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n },\n)", + "code": "resp = client.simulate.ingest(\n docs=[\n {\n \"_index\": \"my-index\",\n \"_id\": \"123\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": \"456\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n pipeline_substitutions={\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n },\n)", "language": "Python" }, { - "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _index: \"my-index\",\n _id: 123,\n _source: {\n foo: \"bar\",\n },\n },\n {\n _index: \"my-index\",\n _id: 456,\n _source: {\n foo: \"rab\",\n },\n },\n ],\n pipeline_substitutions: {\n \"my-pipeline\": {\n processors: [\n {\n uppercase: {\n field: \"foo\",\n },\n },\n ],\n },\n },\n});", + "code": "const response = await client.simulate.ingest({\n docs: [\n {\n _index: \"my-index\",\n _id: \"123\",\n _source: {\n foo: \"bar\",\n },\n },\n {\n _index: \"my-index\",\n _id: \"456\",\n _source: {\n foo: \"rab\",\n },\n },\n ],\n pipeline_substitutions: {\n \"my-pipeline\": {\n processors: [\n {\n uppercase: {\n field: \"foo\",\n },\n },\n ],\n },\n },\n});", "language": "JavaScript" }, { - "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_index\": \"my-index\",\n \"_id\": 123,\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": 456,\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n 
\"pipeline_substitutions\": {\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n }\n }\n)", + "code": "response = client.simulate.ingest(\n body: {\n \"docs\": [\n {\n \"_index\": \"my-index\",\n \"_id\": \"123\",\n \"_source\": {\n \"foo\": \"bar\"\n }\n },\n {\n \"_index\": \"my-index\",\n \"_id\": \"456\",\n \"_source\": {\n \"foo\": \"rab\"\n }\n }\n ],\n \"pipeline_substitutions\": {\n \"my-pipeline\": {\n \"processors\": [\n {\n \"uppercase\": {\n \"field\": \"foo\"\n }\n }\n ]\n }\n }\n }\n)", "language": "Ruby" }, { - "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_index\" => \"my-index\",\n \"_id\" => 123,\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n \"_index\" => \"my-index\",\n \"_id\" => 456,\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n \"pipeline_substitutions\" => [\n \"my-pipeline\" => [\n \"processors\" => array(\n [\n \"uppercase\" => [\n \"field\" => \"foo\",\n ],\n ],\n ),\n ],\n ],\n ],\n]);", + "code": "$resp = $client->simulate()->ingest([\n \"body\" => [\n \"docs\" => array(\n [\n \"_index\" => \"my-index\",\n \"_id\" => \"123\",\n \"_source\" => [\n \"foo\" => \"bar\",\n ],\n ],\n [\n \"_index\" => \"my-index\",\n \"_id\" => \"456\",\n \"_source\" => [\n \"foo\" => \"rab\",\n ],\n ],\n ),\n \"pipeline_substitutions\" => [\n \"my-pipeline\" => [\n \"processors\" => array(\n [\n \"uppercase\" => [\n \"field\" => \"foo\",\n ],\n ],\n ),\n ],\n ],\n ],\n]);", "language": "PHP" }, { - "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_index\":\"my-index\",\"_id\":123,\"_source\":{\"foo\":\"bar\"}},{\"_index\":\"my-index\",\"_id\":456,\"_source\":{\"foo\":\"rab\"}}],\"pipeline_substitutions\":{\"my-pipeline\":{\"processors\":[{\"uppercase\":{\"field\":\"foo\"}}]}}}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"", + "code": "curl -X POST -H \"Authorization: ApiKey $ELASTIC_API_KEY\" -H \"Content-Type: application/json\" -d '{\"docs\":[{\"_index\":\"my-index\",\"_id\":\"123\",\"_source\":{\"foo\":\"bar\"}},{\"_index\":\"my-index\",\"_id\":\"456\",\"_source\":{\"foo\":\"rab\"}}],\"pipeline_substitutions\":{\"my-pipeline\":{\"processors\":[{\"uppercase\":{\"field\":\"foo\"}}]}}}' \"$ELASTICSEARCH_URL/_ingest/_simulate\"", "language": "curl" }, { diff --git a/specification/inference/put_ai21/examples/request/PutAi21RequestExample2.yaml b/specification/inference/put_ai21/examples/request/PutAi21RequestExample2.yaml index 63041d1f03..d61f46733c 100644 --- a/specification/inference/put_ai21/examples/request/PutAi21RequestExample2.yaml +++ b/specification/inference/put_ai21/examples/request/PutAi21RequestExample2.yaml @@ -1,5 +1,7 @@ # summary: -description: Run `PUT _inference/chat-completion/ai21-chat-completion` to create a AI21 inference endpoint that performs a `chat_completion` task. +description: + Run `PUT _inference/chat-completion/ai21-chat-completion` to create a AI21 inference endpoint that performs a + `chat_completion` task. 
 method_request: 'PUT _inference/chat-completion/ai21-chat-completion'
 # type: "request"
 value: |-
diff --git a/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample1.yaml b/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample1.yaml
index 006249bbd1..4c93a454d4 100644
--- a/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample1.yaml
+++ b/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample1.yaml
@@ -1,5 +1,7 @@
 summary: A text embedding task
-description: Run `PUT _inference/text_embedding/amazon_sagemaker_embeddings` to create an inference endpoint that performs a text embedding task.
+description:
+  Run `PUT _inference/text_embedding/amazon_sagemaker_embeddings` to create an inference endpoint that performs a text
+  embedding task.
 method_request: 'PUT _inference/text_embedding/amazon_sagemaker_embeddings'
 # type: "request"
 value: |-
diff --git a/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample3.yaml b/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample3.yaml
index ab12f559a3..88c33789f6 100644
--- a/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample3.yaml
+++ b/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample3.yaml
@@ -1,5 +1,7 @@
 summary: A chat completion task
-description: Run `PUT _inference/chat_completion/amazon_sagemaker_chat_completion` to create an inference endpoint that performs a chat completion task.
+description:
+  Run `PUT _inference/chat_completion/amazon_sagemaker_chat_completion` to create an inference endpoint that performs a
+  chat completion task.
 method_request: 'PUT _inference/chat_completion/amazon_sagemaker_chat_completion'
 # type: "request"
 value: |-
diff --git a/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample4.yaml b/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample4.yaml
index afac438c22..a26bdef822 100644
--- a/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample4.yaml
+++ b/specification/inference/put_amazonsagemaker/examples/request/PutAmazonSageMakerRequestExample4.yaml
@@ -1,5 +1,7 @@
 summary: A sparse embedding task
-description: Run `PUT _inference/sparse_embedding/amazon_sagemaker_sparse_embedding` to create an inference endpoint that performs a sparse embedding task.
+description:
+  Run `PUT _inference/sparse_embedding/amazon_sagemaker_sparse_embedding` to create an inference endpoint that performs a
+  sparse embedding task.
 method_request: 'PUT _inference/sparse_embedding/amazon_sagemaker_sparse_embedding'
 # type: "request"
 value: |-
diff --git a/specification/inference/put_custom/examples/request/PutCustomRequestExample5.yaml b/specification/inference/put_custom/examples/request/PutCustomRequestExample5.yaml
index c9f86dad8d..e7896d2bac 100644
--- a/specification/inference/put_custom/examples/request/PutCustomRequestExample5.yaml
+++ b/specification/inference/put_custom/examples/request/PutCustomRequestExample5.yaml
@@ -1,5 +1,7 @@
 summary: Custom text embedding task (Hugging Face)
-description: Run `PUT _inference/text_embedding/custom-text-embedding-hf` to create an inference endpoint that performs a text embedding task by using the Qwen/Qwen3-Embedding-8B model.
+description:
+  Run `PUT _inference/text_embedding/custom-text-embedding-hf` to create an inference endpoint that performs a text
+  embedding task by using the Qwen/Qwen3-Embedding-8B model.
 method_request: 'PUT _inference/text_embedding/custom-text-embedding-hf'
 # type: "request"
 value: |-
diff --git a/specification/snapshot/create/SnapshotCreateRequest.ts b/specification/snapshot/create/SnapshotCreateRequest.ts
index 5c7ca93bc9..4e449d47db 100644
--- a/specification/snapshot/create/SnapshotCreateRequest.ts
+++ b/specification/snapshot/create/SnapshotCreateRequest.ts
@@ -25,7 +25,7 @@ import { Duration } from '@_types/Time'
  * Create a snapshot.
  * Take a snapshot of a cluster or of data streams and indices.
  * @rest_spec_name snapshot.create
- * @availability stack since=0.0.0 stability=stable
+ * @availability stack stability=stable
  * @availability serverless stability=stable visibility=private
  * @cluster_privileges create_snapshot
  * @doc_id snapshot-create-api
diff --git a/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts b/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts
index f1fd161b23..427cbf73a8 100644
--- a/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts
+++ b/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts
@@ -31,7 +31,7 @@ import { Repository } from '@snapshot/_types/SnapshotRepository'
  * Several options for this API can be specified using a query parameter or a request body parameter.
  * If both parameters are specified, only the query parameter is used.
  * @rest_spec_name snapshot.create_repository
- * @availability stack since=0.0.0 stability=stable
+ * @availability stack stability=stable
  * @availability serverless stability=stable visibility=private
  * @cluster_privileges manage
  * @doc_id snapshot-repo-create
diff --git a/specification/snapshot/delete_repository/SnapshotDeleteRepositoryRequest.ts b/specification/snapshot/delete_repository/SnapshotDeleteRepositoryRequest.ts
index 022e7c6751..19ee50fc3d 100644
--- a/specification/snapshot/delete_repository/SnapshotDeleteRepositoryRequest.ts
+++ b/specification/snapshot/delete_repository/SnapshotDeleteRepositoryRequest.ts
@@ -26,7 +26,7 @@ import { Duration } from '@_types/Time'
  * When a repository is unregistered, Elasticsearch removes only the reference to the location where the repository is storing the snapshots.
  * The snapshots themselves are left untouched and in place.
  * @rest_spec_name snapshot.delete_repository
- * @availability stack since=0.0.0 stability=stable
+ * @availability stack stability=stable
  * @availability serverless stability=stable visibility=private
  * @cluster_privileges manage
  * @doc_id snapshot-repo-delete
diff --git a/specification/snapshot/get/SnapshotGetRequest.ts b/specification/snapshot/get/SnapshotGetRequest.ts
index e509a5e666..a0e7bbfebf 100644
--- a/specification/snapshot/get/SnapshotGetRequest.ts
+++ b/specification/snapshot/get/SnapshotGetRequest.ts
@@ -32,7 +32,7 @@ import { SnapshotState } from '@snapshot/_types/SnapshotState'
  * It is guaranteed that any snapshot that exists at the beginning of the iteration and is not concurrently deleted will be seen during the iteration.
  * Snapshots concurrently created may be seen during an iteration.
  * @rest_spec_name snapshot.get
- * @availability stack since=0.0.0 stability=stable
+ * @availability stack stability=stable
  * @availability serverless stability=stable visibility=private
  * @cluster_privileges monitor_snapshot
  * @doc_id snapshot-get
diff --git a/specification/snapshot/get_repository/SnapshotGetRepositoryRequest.ts b/specification/snapshot/get_repository/SnapshotGetRepositoryRequest.ts
index c17c9ef80f..7c01fe1fdb 100644
--- a/specification/snapshot/get_repository/SnapshotGetRepositoryRequest.ts
+++ b/specification/snapshot/get_repository/SnapshotGetRepositoryRequest.ts
@@ -24,7 +24,7 @@ import { Duration } from '@_types/Time'
 /**
  * Get snapshot repository information.
  * @rest_spec_name snapshot.get_repository
- * @availability stack since=0.0.0 stability=stable
+ * @availability stack stability=stable
  * @availability serverless stability=stable visibility=private
  * @cluster_privileges monitor_snapshot
  * @doc_id snapshot-repo-get
diff --git a/specification/snapshot/restore/SnapshotRestoreRequest.ts b/specification/snapshot/restore/SnapshotRestoreRequest.ts
index 2e55e7f6a2..2b5d1ff29d 100644
--- a/specification/snapshot/restore/SnapshotRestoreRequest.ts
+++ b/specification/snapshot/restore/SnapshotRestoreRequest.ts
@@ -42,7 +42,7 @@ import { IndexSettings } from '@indices/_types/IndexSettings'
  *
  * If your snapshot contains data from App Search or Workplace Search, you must restore the Enterprise Search encryption key before you restore the snapshot.
  * @rest_spec_name snapshot.restore
- * @availability stack since=0.0.0 stability=stable
+ * @availability stack stability=stable
  * @availability serverless stability=stable visibility=private
  * @cluster_privileges manage
  * @doc_id snapshot-restore-api
diff --git a/specification/snapshot/verify_repository/SnapshotVerifyRepositoryRequest.ts b/specification/snapshot/verify_repository/SnapshotVerifyRepositoryRequest.ts
index bd15e40869..b3ac543a90 100644
--- a/specification/snapshot/verify_repository/SnapshotVerifyRepositoryRequest.ts
+++ b/specification/snapshot/verify_repository/SnapshotVerifyRepositoryRequest.ts
@@ -25,7 +25,7 @@ import { Duration } from '@_types/Time'
  * Verify a snapshot repository.
  * Check for common misconfigurations in a snapshot repository.
  * @rest_spec_name snapshot.verify_repository
- * @availability stack since=0.0.0 stability=stable
+ * @availability stack stability=stable
  * @availability serverless stability=stable visibility=private
  * @cluster_privileges manage
  * @doc_id snapshot-repo-verify