From c03c3201b5c661a2705d5d5e7d987f2240632f6b Mon Sep 17 00:00:00 2001
From: "ci.datadog-api-spec"
Date: Thu, 31 Jul 2025 15:32:05 +0000
Subject: [PATCH] Regenerate client from commit b14c9da of spec repo

---
 .generated-info                                       |  4 ++--
 .generator/schemas/v2/openapi.yaml                    | 10 ++++++++++
 .../observability_pipeline_config_processor_item.py   |  6 +++---
 .../model/observability_pipeline_sample_processor.py  | 11 +++++++++++
 4 files changed, 26 insertions(+), 5 deletions(-)

diff --git a/.generated-info b/.generated-info
index 6bc2a4eb83..f57f6fc43d 100644
--- a/.generated-info
+++ b/.generated-info
@@ -1,4 +1,4 @@
 {
-  "spec_repo_commit": "b75095c",
-  "generated": "2025-07-31 10:46:07.850"
+  "spec_repo_commit": "b14c9da",
+  "generated": "2025-07-31 15:32:05.142"
 }
diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml
index b125b71c69..ab031f4a84 100644
--- a/.generator/schemas/v2/openapi.yaml
+++ b/.generator/schemas/v2/openapi.yaml
@@ -27480,6 +27480,16 @@ components:
       description: The `sample` processor allows probabilistic sampling of logs
         at a fixed rate.
       properties:
+        group_by:
+          description: Optional list of fields to group events by. Each group will
+            be sampled independently
+          example:
+          - service
+          - host
+          items:
+            type: string
+          minItems: 1
+          type: array
         id:
           description: The unique identifier for this component. Used to reference
             this component in other parts of the pipeline (for example, as the `input`
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py
index a761a8beb8..e4e7787c42 100644
--- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py
+++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py
@@ -61,6 +61,9 @@ def __init__(self, **kwargs):
         :param metrics: Configuration for generating individual metrics.
         :type metrics: [ObservabilityPipelineGeneratedMetric]
 
+        :param group_by: Optional list of fields to group events by. Each group will be sampled independently
+        :type group_by: [str], optional
+
         :param percentage: The percentage of logs to sample.
         :type percentage: float, optional
 
@@ -91,9 +94,6 @@ def __init__(self, **kwargs):
         :param target: Path where enrichment results should be stored in the log.
         :type target: str
 
-        :param group_by: A list of fields used to group log events for merging.
-        :type group_by: [str]
-
         :param merge_strategies: List of merge strategies defining how values from grouped events should be combined.
         :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy]
 
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py
index e6f16ce99d..3eb78755b7 100644
--- a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py
+++ b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py
@@ -21,6 +21,9 @@ class ObservabilityPipelineSampleProcessor(ModelNormal):
 
 
     validations = {
+        "group_by": {
+            "min_items": 1,
+        },
         "rate": {
             "inclusive_minimum": 1,
         },
@@ -33,6 +36,7 @@ def openapi_types(_):
         )
 
         return {
+            "group_by": ([str],),
             "id": (str,),
             "include": (str,),
             "inputs": ([str],),
@@ -42,6 +46,7 @@ def openapi_types(_):
         }
 
     attribute_map = {
+        "group_by": "group_by",
         "id": "id",
         "include": "include",
         "inputs": "inputs",
@@ -56,6 +61,7 @@ def __init__(
         include: str,
         inputs: List[str],
         type: ObservabilityPipelineSampleProcessorType,
+        group_by: Union[List[str], UnsetType] = unset,
         percentage: Union[float, UnsetType] = unset,
         rate: Union[int, UnsetType] = unset,
         **kwargs,
@@ -63,6 +69,9 @@ def __init__(
         """
        The ``sample`` processor allows probabilistic sampling of logs at a fixed rate.
 
+        :param group_by: Optional list of fields to group events by. Each group will be sampled independently
+        :type group_by: [str], optional
+
         :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components).
         :type id: str
 
@@ -81,6 +90,8 @@ def __init__(
         :param type: The processor type. The value should always be ``sample``.
         :type type: ObservabilityPipelineSampleProcessorType
         """
+        if group_by is not unset:
+            kwargs["group_by"] = group_by
         if percentage is not unset:
            kwargs["percentage"] = percentage
         if rate is not unset:
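
For reviewers, a minimal usage sketch of the regenerated model with the new group_by field. The component ID, search query, input name, and rate below are illustrative placeholders, and the import paths assume the client's usual generated module layout; none of these values come from this patch.

from datadog_api_client.v2.model.observability_pipeline_sample_processor import (
    ObservabilityPipelineSampleProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_sample_processor_type import (
    ObservabilityPipelineSampleProcessorType,
)

# Keep 1 in 10 logs, sampled independently for each service/host group.
# group_by is the field added by this regeneration; all values are examples.
processor = ObservabilityPipelineSampleProcessor(
    id="sample-processor",            # hypothetical component ID
    include="service:my-service",     # hypothetical Datadog search query
    inputs=["datadog-agent-source"],  # hypothetical upstream component ID
    type=ObservabilityPipelineSampleProcessorType.SAMPLE,
    group_by=["service", "host"],
    rate=10,
)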